gcc/builtins.c (official-gcc.git)
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "tree-gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
56 #endif
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
67 #undef DEF_BUILTIN
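/* A sketch of what the stringizing definition above produces: for a
   builtins.def entry that defines, say, BUILT_IN_MEMCPY, the #X in
   DEF_BUILTIN contributes the literal below, so built_in_names maps a
   built_in_function code to its printable name.  (The entry named here
   is illustrative, not quoted from builtins.def.)  */
#if 0
const char *example_name = "BUILT_IN_MEMCPY";  /* built_in_names[BUILT_IN_MEMCPY] */
#endif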
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance, when the runtime is
74 not required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
85 #endif
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
187 enum tree_code);
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
239 #endif
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This is the case whenever a function is invoked
243 with its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
249 return true;
250 if (strncmp (name, "__sync_", 7) == 0)
251 return true;
252 return false;
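/* A minimal illustration of the distinction tested above (assuming the
   usual C front-end names): both calls below reach the expanders as
   BUILT_IN_MEMCPY, but only the first is spelled with the internal
   "__builtin_" name, so only it is considered for inline expansion
   regardless of the optimization level.  */
#if 0
extern void *memcpy (void *, const void *, __SIZE_TYPE__);

void
copy_header (char *dst, const char *src)
{
  __builtin_memcpy (dst, src, 16);  /* internal name: always considered    */
  memcpy (dst, src, 16);            /* library name: subject to -O level   */
}
#endif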
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
263 unsigned int
264 get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
270 return 0;
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
273 return 0;
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
278 while (1)
280 switch (TREE_CODE (exp))
282 CASE_CONVERT:
283 exp = TREE_OPERAND (exp, 0);
284 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
285 return align;
287 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
288 align = MIN (inner, max_align);
289 break;
291 case POINTER_PLUS_EXPR:
292 /* If sum of pointer + int, restrict our maximum alignment to that
293 imposed by the integer. If not, we can't do any better than
294 ALIGN. */
295 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
296 return align;
298 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
299 & (max_align / BITS_PER_UNIT - 1))
300 != 0)
301 max_align >>= 1;
303 exp = TREE_OPERAND (exp, 0);
304 break;
306 case ADDR_EXPR:
307 /* See what we are pointing at and look at its alignment. */
308 exp = TREE_OPERAND (exp, 0);
309 inner = max_align;
310 if (handled_component_p (exp))
312 HOST_WIDE_INT bitsize, bitpos;
313 tree offset;
314 enum machine_mode mode;
315 int unsignedp, volatilep;
317 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
318 &mode, &unsignedp, &volatilep, true);
319 if (bitpos)
320 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
321 if (offset && TREE_CODE (offset) == PLUS_EXPR
322 && host_integerp (TREE_OPERAND (offset, 1), 1))
324 /* Any overflow in calculating offset_bits won't change
325 the alignment. */
326 unsigned offset_bits
327 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
328 * BITS_PER_UNIT);
330 if (offset_bits)
331 inner = MIN (inner, (offset_bits & -offset_bits));
332 offset = TREE_OPERAND (offset, 0);
334 if (offset && TREE_CODE (offset) == MULT_EXPR
335 && host_integerp (TREE_OPERAND (offset, 1), 1))
337 /* Any overflow in calculating offset_factor won't change
338 the alignment. */
339 unsigned offset_factor
340 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
341 * BITS_PER_UNIT);
343 if (offset_factor)
344 inner = MIN (inner, (offset_factor & -offset_factor));
346 else if (offset)
347 inner = MIN (inner, BITS_PER_UNIT);
349 if (DECL_P (exp))
350 align = MIN (inner, DECL_ALIGN (exp));
351 #ifdef CONSTANT_ALIGNMENT
352 else if (CONSTANT_CLASS_P (exp))
353 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
354 #endif
355 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
356 || TREE_CODE (exp) == INDIRECT_REF)
357 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
358 else
359 align = MIN (align, inner);
360 return MIN (align, max_align);
362 default:
363 return align;
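/* A hypothetical caller of the routine above, to show the intended use:
   the second argument caps the answer, and the result is an alignment
   in bits (0 if EXP is not pointer-valued).  The helper name is made up
   for illustration.  */
#if 0
static bool
pointer_is_word_aligned (tree ptr)
{
  unsigned int align = get_pointer_alignment (ptr, BIGGEST_ALIGNMENT);
  return align >= BITS_PER_WORD;
}
#endif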
368 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
369 way, because it could contain a zero byte in the middle.
370 TREE_STRING_LENGTH is the size of the character array, not the string.
372 ONLY_VALUE should be nonzero if the result is not going to be emitted
373 into the instruction stream and zero if it is going to be expanded.
374 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
375 is returned, otherwise NULL, since
376 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
377 evaluate the side-effects.
379 The value returned is of type `ssizetype'.
381 Unfortunately, string_constant can't access the values of const char
382 arrays with initializers, so neither can we do so here. */
384 tree
385 c_strlen (tree src, int only_value)
387 tree offset_node;
388 HOST_WIDE_INT offset;
389 int max;
390 const char *ptr;
392 STRIP_NOPS (src);
393 if (TREE_CODE (src) == COND_EXPR
394 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
396 tree len1, len2;
398 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
399 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
400 if (tree_int_cst_equal (len1, len2))
401 return len1;
404 if (TREE_CODE (src) == COMPOUND_EXPR
405 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
406 return c_strlen (TREE_OPERAND (src, 1), only_value);
408 src = string_constant (src, &offset_node);
409 if (src == 0)
410 return NULL_TREE;
412 max = TREE_STRING_LENGTH (src) - 1;
413 ptr = TREE_STRING_POINTER (src);
415 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
417 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
418 compute the offset to the following null if we don't know where to
419 start searching for it. */
420 int i;
422 for (i = 0; i < max; i++)
423 if (ptr[i] == 0)
424 return NULL_TREE;
426 /* We don't know the starting offset, but we do know that the string
427 has no internal zero bytes. We can assume that the offset falls
428 within the bounds of the string; otherwise, the programmer deserves
429 what he gets. Subtract the offset from the length of the string,
430 and return that. This would perhaps not be valid if we were dealing
431 with named arrays in addition to literal string constants. */
433 return size_diffop (size_int (max), offset_node);
436 /* We have a known offset into the string. Start searching there for
437 a null character if we can represent it as a single HOST_WIDE_INT. */
438 if (offset_node == 0)
439 offset = 0;
440 else if (! host_integerp (offset_node, 0))
441 offset = -1;
442 else
443 offset = tree_low_cst (offset_node, 0);
445 /* If the offset is known to be out of bounds, warn, and call strlen at
446 runtime. */
447 if (offset < 0 || offset > max)
449 /* Suppress multiple warnings for propagated constant strings. */
450 if (! TREE_NO_WARNING (src))
452 warning (0, "offset outside bounds of constant string");
453 TREE_NO_WARNING (src) = 1;
455 return NULL_TREE;
458 /* Use strlen to search for the first zero byte. Since any strings
459 constructed with build_string will have nulls appended, we win even
460 if we get handed something like (char[4])"abcd".
462 Since OFFSET is our starting index into the string, no further
463 calculation is needed. */
464 return ssize_int (strlen (ptr + offset));
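/* Source-level calls that the logic above lets the folders reduce to
   constants (illustrative; the folding itself is driven by
   fold_builtin_strlen):  */
#if 0
int
folded_lengths (int i)
{
  int a = __builtin_strlen ("hello");            /* offset 0: folds to 5      */
  int b = __builtin_strlen ("hello" + 2);        /* known offset: folds to 3  */
  int c = __builtin_strlen (i ? "foo" : "bar");  /* equal arms: folds to 3    */
  return a + b + c;
}
#endif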
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
470 static const char *
471 c_getstr (tree src)
473 tree offset_node;
475 src = string_constant (src, &offset_node);
476 if (src == 0)
477 return 0;
479 if (offset_node == 0)
480 return TREE_STRING_POINTER (src);
481 else if (!host_integerp (offset_node, 1)
482 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
483 return 0;
485 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
488 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
489 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
491 static rtx
492 c_readstr (const char *str, enum machine_mode mode)
494 HOST_WIDE_INT c[2];
495 HOST_WIDE_INT ch;
496 unsigned int i, j;
498 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
500 c[0] = 0;
501 c[1] = 0;
502 ch = 1;
503 for (i = 0; i < GET_MODE_SIZE (mode); i++)
505 j = i;
506 if (WORDS_BIG_ENDIAN)
507 j = GET_MODE_SIZE (mode) - i - 1;
508 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
509 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
510 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
511 j *= BITS_PER_UNIT;
512 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
514 if (ch)
515 ch = (unsigned char) str[i];
516 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
518 return immed_double_const (c[0], c[1], mode);
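/* A worked example of the loop above: STR = "abcd", MODE = SImode.
   The constant assembled in C[0] matches what a word-sized load of the
   string would see on the target:
     !BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN:  0x64636261  ('a' in the low byte)
      BYTES_BIG_ENDIAN &&  WORDS_BIG_ENDIAN:  0x61626364  ('a' in the high byte)
   This is what lets the string expanders handle a constant string one
   word at a time.  */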
521 /* Cast a target constant CST to target CHAR and if that value fits into
522 host char type, return zero and put that value into variable pointed to by
523 P. */
525 static int
526 target_char_cast (tree cst, char *p)
528 unsigned HOST_WIDE_INT val, hostval;
530 if (!host_integerp (cst, 1)
531 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
532 return 1;
534 val = tree_low_cst (cst, 1);
535 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
536 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
538 hostval = val;
539 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
540 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
542 if (val != hostval)
543 return 1;
545 *p = hostval;
546 return 0;
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
553 static tree
554 builtin_save_expr (tree exp)
556 if (TREE_ADDRESSABLE (exp) == 0
557 && (TREE_CODE (exp) == PARM_DECL
558 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
559 return exp;
561 return save_expr (exp);
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
568 static rtx
569 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
571 int i;
573 #ifdef INITIAL_FRAME_ADDRESS_RTX
574 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
575 #else
576 rtx tem;
578 /* For a zero count with __builtin_return_address, we don't care what
579 frame address we return, because target-specific definitions will
580 override us. Therefore frame pointer elimination is OK, and using
581 the soft frame pointer is OK.
583 For a nonzero count, or a zero count with __builtin_frame_address,
584 we require a stable offset from the current frame pointer to the
585 previous one, so we must use the hard frame pointer, and
586 we must disable frame pointer elimination. */
587 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
588 tem = frame_pointer_rtx;
589 else
591 tem = hard_frame_pointer_rtx;
593 /* Tell reload not to eliminate the frame pointer. */
594 crtl->accesses_prior_frames = 1;
596 #endif
598 /* Some machines need special handling before we can access
599 arbitrary frames. For example, on the SPARC, we must first flush
600 all register windows to the stack. */
601 #ifdef SETUP_FRAME_ADDRESSES
602 if (count > 0)
603 SETUP_FRAME_ADDRESSES ();
604 #endif
606 /* On the SPARC, the return address is not in the frame, it is in a
607 register. There is no way to access it off of the current frame
608 pointer, but it can be accessed off the previous frame pointer by
609 reading the value from the register window save area. */
610 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
611 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
612 count--;
613 #endif
615 /* Scan back COUNT frames to the specified frame. */
616 for (i = 0; i < count; i++)
618 /* Assume the dynamic chain pointer is in the word that the
619 frame address points to, unless otherwise specified. */
620 #ifdef DYNAMIC_CHAIN_ADDRESS
621 tem = DYNAMIC_CHAIN_ADDRESS (tem);
622 #endif
623 tem = memory_address (Pmode, tem);
624 tem = gen_frame_mem (Pmode, tem);
625 tem = copy_to_reg (tem);
628 /* For __builtin_frame_address, return what we've got. But, on
629 the SPARC for example, we may have to add a bias. */
630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
631 #ifdef FRAME_ADDR_RTX
632 return FRAME_ADDR_RTX (tem);
633 #else
634 return tem;
635 #endif
637 /* For __builtin_return_address, get the return address from that frame. */
638 #ifdef RETURN_ADDR_RTX
639 tem = RETURN_ADDR_RTX (count, tem);
640 #else
641 tem = memory_address (Pmode,
642 plus_constant (tem, GET_MODE_SIZE (Pmode)));
643 tem = gen_frame_mem (Pmode, tem);
644 #endif
645 return tem;
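/* User-level view of the two builtins handled above; a count of 0 asks
   about the current frame, and any non-zero count walks the dynamic
   chain and therefore relies on the hard frame pointer as described in
   the comments.  */
#if 0
void *
my_return_address (void)
{
  return __builtin_return_address (0);
}

void *
my_callers_frame (void)
{
  return __builtin_frame_address (1);
}
#endif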
648 /* Alias set used for setjmp buffer. */
649 static alias_set_type setjmp_alias_set = -1;
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
655 void
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
659 rtx stack_save;
660 rtx mem;
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
665 buf_addr = convert_memory_address (Pmode, buf_addr);
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
678 set_mem_alias_set (mem, setjmp_alias_set);
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
693 #endif
695 /* Tell optimize_save_area_alloca that extra work will need to
696 happen during alloca. */
697 cfun->calls_setjmp = 1;
699 /* We have a nonlocal label. */
700 cfun->has_nonlocal_label = 1;
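/* A purely illustrative view of the buffer laid out above; the code
   addresses it by Pmode-sized offsets, not through a struct.  This is
   why __builtin_setjmp/__builtin_longjmp document their argument as a
   pointer to five words.  */
#if 0
struct builtin_setjmp_buf_view
{
  void *frame_value;      /* word 0: targetm.builtin_setjmp_frame_value ()  */
  void *receiver_label;   /* word 1: where the longjmp will land            */
  void *stack_save[3];    /* word 2 on: machine-dependent stack save area   */
};
#endif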
703 /* Construct the trailing part of a __builtin_setjmp call. This is
704 also called directly by the SJLJ exception handling code. */
706 void
707 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
709 /* Clobber the FP when we get here, so we have to make sure it's
710 marked as used by this function. */
711 emit_use (hard_frame_pointer_rtx);
713 /* Mark the static chain as clobbered here so life information
714 doesn't get messed up for it. */
715 emit_clobber (static_chain_rtx);
717 /* Now put in the code to restore the frame pointer, and argument
718 pointer, if needed. */
719 #ifdef HAVE_nonlocal_goto
720 if (! HAVE_nonlocal_goto)
721 #endif
723 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
724 /* This might change the hard frame pointer in ways that aren't
725 apparent to early optimization passes, so force a clobber. */
726 emit_clobber (hard_frame_pointer_rtx);
729 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
730 if (fixed_regs[ARG_POINTER_REGNUM])
732 #ifdef ELIMINABLE_REGS
733 size_t i;
734 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
736 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
737 if (elim_regs[i].from == ARG_POINTER_REGNUM
738 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
739 break;
741 if (i == ARRAY_SIZE (elim_regs))
742 #endif
744 /* Now restore our arg pointer from the address at which it
745 was saved in our stack frame. */
746 emit_move_insn (virtual_incoming_args_rtx,
747 copy_to_reg (get_arg_pointer_save_area ()));
750 #endif
752 #ifdef HAVE_builtin_setjmp_receiver
753 if (HAVE_builtin_setjmp_receiver)
754 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
755 else
756 #endif
757 #ifdef HAVE_nonlocal_goto_receiver
758 if (HAVE_nonlocal_goto_receiver)
759 emit_insn (gen_nonlocal_goto_receiver ());
760 else
761 #endif
762 { /* Nothing */ }
764 /* We must not allow the code we just generated to be reordered by
765 scheduling. Specifically, the update of the frame pointer must
766 happen immediately, not later. */
767 emit_insn (gen_blockage ());
770 /* __builtin_longjmp is passed a pointer to an array of five words (not
771 all will be used on all machines). It operates similarly to the C
772 library function of the same name, but is more efficient. Much of
773 the code below is copied from the handling of non-local gotos. */
775 static void
776 expand_builtin_longjmp (rtx buf_addr, rtx value)
778 rtx fp, lab, stack, insn, last;
779 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
781 if (setjmp_alias_set == -1)
782 setjmp_alias_set = new_alias_set ();
784 buf_addr = convert_memory_address (Pmode, buf_addr);
786 buf_addr = force_reg (Pmode, buf_addr);
788 /* We used to store value in static_chain_rtx, but that fails if pointers
789 are smaller than integers. We instead require that the user must pass
790 a second argument of 1, because that is what builtin_setjmp will
791 return. This also makes EH slightly more efficient, since we are no
792 longer copying around a value that we don't care about. */
793 gcc_assert (value == const1_rtx);
795 last = get_last_insn ();
796 #ifdef HAVE_builtin_longjmp
797 if (HAVE_builtin_longjmp)
798 emit_insn (gen_builtin_longjmp (buf_addr));
799 else
800 #endif
802 fp = gen_rtx_MEM (Pmode, buf_addr);
803 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
804 GET_MODE_SIZE (Pmode)));
806 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (fp, setjmp_alias_set);
809 set_mem_alias_set (lab, setjmp_alias_set);
810 set_mem_alias_set (stack, setjmp_alias_set);
812 /* Pick up FP, label, and SP from the block and jump. This code is
813 from expand_goto in stmt.c; see there for detailed comments. */
814 #ifdef HAVE_nonlocal_goto
815 if (HAVE_nonlocal_goto)
816 /* We have to pass a value to the nonlocal_goto pattern that will
817 get copied into the static_chain pointer, but it does not matter
818 what that value is, because builtin_setjmp does not use it. */
819 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
820 else
821 #endif
823 lab = copy_to_reg (lab);
825 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
826 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
828 emit_move_insn (hard_frame_pointer_rtx, fp);
829 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
831 emit_use (hard_frame_pointer_rtx);
832 emit_use (stack_pointer_rtx);
833 emit_indirect_jump (lab);
837 /* Search backwards and mark the jump insn as a non-local goto.
838 Note that this precludes the use of __builtin_longjmp to a
839 __builtin_setjmp target in the same function. However, we've
840 already cautioned the user that these functions are for
841 internal exception handling use only. */
842 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
844 gcc_assert (insn != last);
846 if (JUMP_P (insn))
848 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
849 break;
851 else if (CALL_P (insn))
852 break;
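/* The usage contract enforced above (the gcc_assert on VALUE and the
   REG_NON_LOCAL_GOTO note), seen from the source level: the second
   argument must be the literal 1, and the jump must leave the function
   containing the __builtin_setjmp; these builtins are intended for the
   internal sjlj exception machinery rather than general use.  */
#if 0
static void *jmp_buffer[5];

static void
do_throw (void)
{
  __builtin_longjmp (jmp_buffer, 1);   /* only 1 is accepted  */
}

int
do_catch (void)
{
  if (__builtin_setjmp (jmp_buffer))
    return 1;                          /* reached via the longjmp  */
  do_throw ();
  return 0;
}
#endif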
856 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
857 and the address of the save area. */
859 static rtx
860 expand_builtin_nonlocal_goto (tree exp)
862 tree t_label, t_save_area;
863 rtx r_label, r_save_area, r_fp, r_sp, insn;
865 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
866 return NULL_RTX;
868 t_label = CALL_EXPR_ARG (exp, 0);
869 t_save_area = CALL_EXPR_ARG (exp, 1);
871 r_label = expand_normal (t_label);
872 r_label = convert_memory_address (Pmode, r_label);
873 r_save_area = expand_normal (t_save_area);
874 r_save_area = convert_memory_address (Pmode, r_save_area);
875 /* Copy the address of the save location to a register just in case it was based
876 on the frame pointer. */
877 r_save_area = copy_to_reg (r_save_area);
878 r_fp = gen_rtx_MEM (Pmode, r_save_area);
879 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
880 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
882 crtl->has_nonlocal_goto = 1;
884 #ifdef HAVE_nonlocal_goto
885 /* ??? We no longer need to pass the static chain value, afaik. */
886 if (HAVE_nonlocal_goto)
887 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
888 else
889 #endif
891 r_label = copy_to_reg (r_label);
893 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
894 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
896 /* Restore frame pointer for containing function.
897 This sets the actual hard register used for the frame pointer
898 to the location of the function's incoming static chain info.
899 The non-local goto handler will then adjust it to contain the
900 proper value and reload the argument pointer, if needed. */
901 emit_move_insn (hard_frame_pointer_rtx, r_fp);
902 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
904 /* USE of hard_frame_pointer_rtx added for consistency;
905 not clear if really needed. */
906 emit_use (hard_frame_pointer_rtx);
907 emit_use (stack_pointer_rtx);
909 /* If the architecture is using a GP register, we must
910 conservatively assume that the target function makes use of it.
911 The prologue of functions with nonlocal gotos must therefore
912 initialize the GP register to the appropriate value, and we
913 must then make sure that this value is live at the point
914 of the jump. (Note that this doesn't necessarily apply
915 to targets with a nonlocal_goto pattern; they are free
916 to implement it in their own way. Note also that this is
917 a no-op if the GP register is a global invariant.) */
918 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
919 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
920 emit_use (pic_offset_table_rtx);
922 emit_indirect_jump (r_label);
925 /* Search backwards to the jump insn and mark it as a
926 non-local goto. */
927 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
929 if (JUMP_P (insn))
931 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
932 break;
934 else if (CALL_P (insn))
935 break;
938 return const0_rtx;
941 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
942 (not all will be used on all machines) that was passed to __builtin_setjmp.
943 It updates the stack pointer in that block to correspond to the current
944 stack pointer. */
946 static void
947 expand_builtin_update_setjmp_buf (rtx buf_addr)
949 enum machine_mode sa_mode = Pmode;
950 rtx stack_save;
953 #ifdef HAVE_save_stack_nonlocal
954 if (HAVE_save_stack_nonlocal)
955 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
956 #endif
957 #ifdef STACK_SAVEAREA_MODE
958 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
959 #endif
961 stack_save
962 = gen_rtx_MEM (sa_mode,
963 memory_address
964 (sa_mode,
965 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
967 #ifdef HAVE_setjmp
968 if (HAVE_setjmp)
969 emit_insn (gen_setjmp ());
970 #endif
972 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
975 /* Expand a call to __builtin_prefetch. For a target that does not support
976 data prefetch, evaluate the memory address argument in case it has side
977 effects. */
979 static void
980 expand_builtin_prefetch (tree exp)
982 tree arg0, arg1, arg2;
983 int nargs;
984 rtx op0, op1, op2;
986 if (!validate_arglist (exp, POINTER_TYPE, 0))
987 return;
989 arg0 = CALL_EXPR_ARG (exp, 0);
991 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
992 zero (read) and argument 2 (locality) defaults to 3 (high degree of
993 locality). */
994 nargs = call_expr_nargs (exp);
995 if (nargs > 1)
996 arg1 = CALL_EXPR_ARG (exp, 1);
997 else
998 arg1 = integer_zero_node;
999 if (nargs > 2)
1000 arg2 = CALL_EXPR_ARG (exp, 2);
1001 else
1002 arg2 = build_int_cst (NULL_TREE, 3);
1004 /* Argument 0 is an address. */
1005 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1007 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1008 if (TREE_CODE (arg1) != INTEGER_CST)
1010 error ("second argument to %<__builtin_prefetch%> must be a constant");
1011 arg1 = integer_zero_node;
1013 op1 = expand_normal (arg1);
1014 /* Argument 1 must be either zero or one. */
1015 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1017 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1018 " using zero");
1019 op1 = const0_rtx;
1022 /* Argument 2 (locality) must be a compile-time constant int. */
1023 if (TREE_CODE (arg2) != INTEGER_CST)
1025 error ("third argument to %<__builtin_prefetch%> must be a constant");
1026 arg2 = integer_zero_node;
1028 op2 = expand_normal (arg2);
1029 /* Argument 2 must be 0, 1, 2, or 3. */
1030 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1032 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1033 op2 = const0_rtx;
1036 #ifdef HAVE_prefetch
1037 if (HAVE_prefetch)
1039 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1040 (op0,
1041 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1042 || (GET_MODE (op0) != Pmode))
1044 op0 = convert_memory_address (Pmode, op0);
1045 op0 = force_reg (Pmode, op0);
1047 emit_insn (gen_prefetch (op0, op1, op2));
1049 #endif
1051 /* Don't do anything with direct references to volatile memory, but
1052 generate code to handle other side effects. */
1053 if (!MEM_P (op0) && side_effects_p (op0))
1054 emit_insn (op0);
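/* The argument conventions checked above, from the caller's side: the
   first argument is the address, the optional second must be the
   constant 0 (read, the default) or 1 (write), and the optional third a
   locality constant 0..3 (default 3).  */
#if 0
void
warm_up (const double *p)
{
  __builtin_prefetch (p);             /* same as (p, 0, 3)              */
  __builtin_prefetch (p + 64, 1, 0);  /* for writing, no expected reuse */
}
#endif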
1057 /* Get a MEM rtx for expression EXP which is the address of an operand
1058 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1059 the maximum length of the block of memory that might be accessed or
1060 NULL if unknown. */
1062 static rtx
1063 get_memory_rtx (tree exp, tree len)
1065 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1066 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1068 /* Get an expression we can use to find the attributes to assign to MEM.
1069 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1070 we can. First remove any nops. */
1071 while (CONVERT_EXPR_P (exp)
1072 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1073 exp = TREE_OPERAND (exp, 0);
1075 if (TREE_CODE (exp) == ADDR_EXPR)
1076 exp = TREE_OPERAND (exp, 0);
1077 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1078 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1079 else
1080 exp = NULL;
1082 /* Honor attributes derived from exp, except for the alias set
1083 (as builtin stringops may alias with anything) and the size
1084 (as stringops may access multiple array elements). */
1085 if (exp)
1087 set_mem_attributes (mem, exp, 0);
1089 /* Allow the string and memory builtins to overflow from one
1090 field into another, see http://gcc.gnu.org/PR23561.
1091 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1092 memory accessed by the string or memory builtin will fit
1093 within the field. */
1094 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1096 tree mem_expr = MEM_EXPR (mem);
1097 HOST_WIDE_INT offset = -1, length = -1;
1098 tree inner = exp;
1100 while (TREE_CODE (inner) == ARRAY_REF
1101 || CONVERT_EXPR_P (inner)
1102 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1103 || TREE_CODE (inner) == SAVE_EXPR)
1104 inner = TREE_OPERAND (inner, 0);
1106 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1108 if (MEM_OFFSET (mem)
1109 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1110 offset = INTVAL (MEM_OFFSET (mem));
1112 if (offset >= 0 && len && host_integerp (len, 0))
1113 length = tree_low_cst (len, 0);
1115 while (TREE_CODE (inner) == COMPONENT_REF)
1117 tree field = TREE_OPERAND (inner, 1);
1118 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1119 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1121 /* Bitfields are generally not byte-addressable. */
1122 gcc_assert (!DECL_BIT_FIELD (field)
1123 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1124 % BITS_PER_UNIT) == 0
1125 && host_integerp (DECL_SIZE (field), 0)
1126 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1127 % BITS_PER_UNIT) == 0));
1129 /* If we can prove that the memory starting at XEXP (mem, 0) and
1130 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1131 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1132 fields without DECL_SIZE_UNIT like flexible array members. */
1133 if (length >= 0
1134 && DECL_SIZE_UNIT (field)
1135 && host_integerp (DECL_SIZE_UNIT (field), 0))
1137 HOST_WIDE_INT size
1138 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1139 if (offset <= size
1140 && length <= size
1141 && offset + length <= size)
1142 break;
1145 if (offset >= 0
1146 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1147 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1148 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1149 / BITS_PER_UNIT;
1150 else
1152 offset = -1;
1153 length = -1;
1156 mem_expr = TREE_OPERAND (mem_expr, 0);
1157 inner = TREE_OPERAND (inner, 0);
1160 if (mem_expr == NULL)
1161 offset = -1;
1162 if (mem_expr != MEM_EXPR (mem))
1164 set_mem_expr (mem, mem_expr);
1165 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1168 set_mem_alias_set (mem, 0);
1169 set_mem_size (mem, NULL_RTX);
1172 return mem;
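/* The situation the COMPONENT_REF loop above is defending against (see
   PR23561): a memory builtin may run from one field into the next, so
   when the known length does not fit in the first field, MEM_EXPR is
   widened rather than left claiming the access stays within S.A.  */
#if 0
struct two_fields { char a[4]; char b[4]; } s;

void
clear_both (void)
{
  __builtin_memset (s.a, 0, 8);   /* covers both a and b  */
}
#endif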
1175 /* Built-in functions to perform an untyped call and return. */
1177 /* For each register that may be used for calling a function, this
1178 gives a mode used to copy the register's value. VOIDmode indicates
1179 the register is not used for calling a function. If the machine
1180 has register windows, this gives only the outbound registers.
1181 INCOMING_REGNO gives the corresponding inbound register. */
1182 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1184 /* For each register that may be used for returning values, this gives
1185 a mode used to copy the register's value. VOIDmode indicates the
1186 register is not used for returning values. If the machine has
1187 register windows, this gives only the outbound registers.
1188 INCOMING_REGNO gives the corresponding inbound register. */
1189 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1191 /* For each register that may be used for calling a function, this
1192 gives the offset of that register into the block returned by
1193 __builtin_apply_args. 0 indicates that the register is not
1194 used for calling a function. */
1195 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1197 /* Return the size required for the block returned by __builtin_apply_args,
1198 and initialize apply_args_mode. */
1200 static int
1201 apply_args_size (void)
1203 static int size = -1;
1204 int align;
1205 unsigned int regno;
1206 enum machine_mode mode;
1208 /* The values computed by this function never change. */
1209 if (size < 0)
1211 /* The first value is the incoming arg-pointer. */
1212 size = GET_MODE_SIZE (Pmode);
1214 /* The second value is the structure value address unless this is
1215 passed as an "invisible" first argument. */
1216 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1217 size += GET_MODE_SIZE (Pmode);
1219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1220 if (FUNCTION_ARG_REGNO_P (regno))
1222 mode = reg_raw_mode[regno];
1224 gcc_assert (mode != VOIDmode);
1226 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1227 if (size % align != 0)
1228 size = CEIL (size, align) * align;
1229 apply_args_reg_offset[regno] = size;
1230 size += GET_MODE_SIZE (mode);
1231 apply_args_mode[regno] = mode;
1233 else
1235 apply_args_mode[regno] = VOIDmode;
1236 apply_args_reg_offset[regno] = 0;
1239 return size;
1242 /* Return the size required for the block returned by __builtin_apply,
1243 and initialize apply_result_mode. */
1245 static int
1246 apply_result_size (void)
1248 static int size = -1;
1249 int align, regno;
1250 enum machine_mode mode;
1252 /* The values computed by this function never change. */
1253 if (size < 0)
1255 size = 0;
1257 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1258 if (FUNCTION_VALUE_REGNO_P (regno))
1260 mode = reg_raw_mode[regno];
1262 gcc_assert (mode != VOIDmode);
1264 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1265 if (size % align != 0)
1266 size = CEIL (size, align) * align;
1267 size += GET_MODE_SIZE (mode);
1268 apply_result_mode[regno] = mode;
1270 else
1271 apply_result_mode[regno] = VOIDmode;
1273 /* Allow targets that use untyped_call and untyped_return to override
1274 the size so that machine-specific information can be stored here. */
1275 #ifdef APPLY_RESULT_SIZE
1276 size = APPLY_RESULT_SIZE;
1277 #endif
1279 return size;
1282 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1283 /* Create a vector describing the result block RESULT. If SAVEP is true,
1284 the result block is used to save the values; otherwise it is used to
1285 restore the values. */
1287 static rtx
1288 result_vector (int savep, rtx result)
1290 int regno, size, align, nelts;
1291 enum machine_mode mode;
1292 rtx reg, mem;
1293 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1295 size = nelts = 0;
1296 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1297 if ((mode = apply_result_mode[regno]) != VOIDmode)
1299 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1300 if (size % align != 0)
1301 size = CEIL (size, align) * align;
1302 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1303 mem = adjust_address (result, mode, size);
1304 savevec[nelts++] = (savep
1305 ? gen_rtx_SET (VOIDmode, mem, reg)
1306 : gen_rtx_SET (VOIDmode, reg, mem));
1307 size += GET_MODE_SIZE (mode);
1309 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1311 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1313 /* Save the state required to perform an untyped call with the same
1314 arguments as were passed to the current function. */
1316 static rtx
1317 expand_builtin_apply_args_1 (void)
1319 rtx registers, tem;
1320 int size, align, regno;
1321 enum machine_mode mode;
1322 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1324 /* Create a block where the arg-pointer, structure value address,
1325 and argument registers can be saved. */
1326 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1328 /* Walk past the arg-pointer and structure value address. */
1329 size = GET_MODE_SIZE (Pmode);
1330 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1331 size += GET_MODE_SIZE (Pmode);
1333 /* Save each register used in calling a function to the block. */
1334 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1335 if ((mode = apply_args_mode[regno]) != VOIDmode)
1337 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1338 if (size % align != 0)
1339 size = CEIL (size, align) * align;
1341 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1343 emit_move_insn (adjust_address (registers, mode, size), tem);
1344 size += GET_MODE_SIZE (mode);
1347 /* Save the arg pointer to the block. */
1348 tem = copy_to_reg (virtual_incoming_args_rtx);
1349 #ifdef STACK_GROWS_DOWNWARD
1350 /* We need the pointer to the arguments as the caller actually passed
1351 them to us, not as we might have pretended they were passed. Make sure
1352 it's a valid operand, as emit_move_insn isn't expected to handle a PLUS. */
1353 tem
1354 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1355 NULL_RTX);
1356 #endif
1357 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1359 size = GET_MODE_SIZE (Pmode);
1361 /* Save the structure value address unless this is passed as an
1362 "invisible" first argument. */
1363 if (struct_incoming_value)
1365 emit_move_insn (adjust_address (registers, Pmode, size),
1366 copy_to_reg (struct_incoming_value));
1367 size += GET_MODE_SIZE (Pmode);
1370 /* Return the address of the block. */
1371 return copy_addr_to_reg (XEXP (registers, 0));
1374 /* __builtin_apply_args returns a block of memory allocated on
1375 the stack into which is stored the arg pointer, structure
1376 value address, static chain, and all the registers that might
1377 possibly be used in performing a function call. The code is
1378 moved to the start of the function so the incoming values are
1379 saved. */
1381 static rtx
1382 expand_builtin_apply_args (void)
1384 /* Don't do __builtin_apply_args more than once in a function.
1385 Save the result of the first call and reuse it. */
1386 if (apply_args_value != 0)
1387 return apply_args_value;
1389 /* When this function is called, it means that registers must be
1390 saved on entry to this function. So we migrate the
1391 call to the first insn of this function. */
1392 rtx temp;
1393 rtx seq;
1395 start_sequence ();
1396 temp = expand_builtin_apply_args_1 ();
1397 seq = get_insns ();
1398 end_sequence ();
1400 apply_args_value = temp;
1402 /* Put the insns after the NOTE that starts the function.
1403 If this is inside a start_sequence, make the outer-level insn
1404 chain current, so the code is placed at the start of the
1405 function. */
1406 push_topmost_sequence ();
1407 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1408 pop_topmost_sequence ();
1409 return temp;
1413 /* Perform an untyped call and save the state required to perform an
1414 untyped return of whatever value was returned by the given function. */
1416 static rtx
1417 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1419 int size, align, regno;
1420 enum machine_mode mode;
1421 rtx incoming_args, result, reg, dest, src, call_insn;
1422 rtx old_stack_level = 0;
1423 rtx call_fusage = 0;
1424 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1426 arguments = convert_memory_address (Pmode, arguments);
1428 /* Create a block where the return registers can be saved. */
1429 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1431 /* Fetch the arg pointer from the ARGUMENTS block. */
1432 incoming_args = gen_reg_rtx (Pmode);
1433 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1434 #ifndef STACK_GROWS_DOWNWARD
1435 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1436 incoming_args, 0, OPTAB_LIB_WIDEN);
1437 #endif
1439 /* Push a new argument block and copy the arguments. Do not allow
1440 the (potential) memcpy call below to interfere with our stack
1441 manipulations. */
1442 do_pending_stack_adjust ();
1443 NO_DEFER_POP;
1445 /* Save the stack with nonlocal if available. */
1446 #ifdef HAVE_save_stack_nonlocal
1447 if (HAVE_save_stack_nonlocal)
1448 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1449 else
1450 #endif
1451 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1453 /* Allocate a block of memory onto the stack and copy the memory
1454 arguments to the outgoing arguments address. */
1455 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1456 dest = virtual_outgoing_args_rtx;
1457 #ifndef STACK_GROWS_DOWNWARD
1458 if (GET_CODE (argsize) == CONST_INT)
1459 dest = plus_constant (dest, -INTVAL (argsize));
1460 else
1461 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1462 #endif
1463 dest = gen_rtx_MEM (BLKmode, dest);
1464 set_mem_align (dest, PARM_BOUNDARY);
1465 src = gen_rtx_MEM (BLKmode, incoming_args);
1466 set_mem_align (src, PARM_BOUNDARY);
1467 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1469 /* Refer to the argument block. */
1470 apply_args_size ();
1471 arguments = gen_rtx_MEM (BLKmode, arguments);
1472 set_mem_align (arguments, PARM_BOUNDARY);
1474 /* Walk past the arg-pointer and structure value address. */
1475 size = GET_MODE_SIZE (Pmode);
1476 if (struct_value)
1477 size += GET_MODE_SIZE (Pmode);
1479 /* Restore each of the registers previously saved. Make USE insns
1480 for each of these registers for use in making the call. */
1481 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1482 if ((mode = apply_args_mode[regno]) != VOIDmode)
1484 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1485 if (size % align != 0)
1486 size = CEIL (size, align) * align;
1487 reg = gen_rtx_REG (mode, regno);
1488 emit_move_insn (reg, adjust_address (arguments, mode, size));
1489 use_reg (&call_fusage, reg);
1490 size += GET_MODE_SIZE (mode);
1493 /* Restore the structure value address unless this is passed as an
1494 "invisible" first argument. */
1495 size = GET_MODE_SIZE (Pmode);
1496 if (struct_value)
1498 rtx value = gen_reg_rtx (Pmode);
1499 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1500 emit_move_insn (struct_value, value);
1501 if (REG_P (struct_value))
1502 use_reg (&call_fusage, struct_value);
1503 size += GET_MODE_SIZE (Pmode);
1506 /* All arguments and registers used for the call are set up by now! */
1507 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1509 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1510 and we don't want to load it into a register as an optimization,
1511 because prepare_call_address already did it if it should be done. */
1512 if (GET_CODE (function) != SYMBOL_REF)
1513 function = memory_address (FUNCTION_MODE, function);
1515 /* Generate the actual call instruction and save the return value. */
1516 #ifdef HAVE_untyped_call
1517 if (HAVE_untyped_call)
1518 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1519 result, result_vector (1, result)));
1520 else
1521 #endif
1522 #ifdef HAVE_call_value
1523 if (HAVE_call_value)
1525 rtx valreg = 0;
1527 /* Locate the unique return register. It is not possible to
1528 express a call that sets more than one return register using
1529 call_value; use untyped_call for that. In fact, untyped_call
1530 only needs to save the return registers in the given block. */
1531 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1532 if ((mode = apply_result_mode[regno]) != VOIDmode)
1534 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1536 valreg = gen_rtx_REG (mode, regno);
1539 emit_call_insn (GEN_CALL_VALUE (valreg,
1540 gen_rtx_MEM (FUNCTION_MODE, function),
1541 const0_rtx, NULL_RTX, const0_rtx));
1543 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1545 else
1546 #endif
1547 gcc_unreachable ();
1549 /* Find the CALL insn we just emitted, and attach the register usage
1550 information. */
1551 call_insn = last_call_insn ();
1552 add_function_usage_to (call_insn, call_fusage);
1554 /* Restore the stack. */
1555 #ifdef HAVE_save_stack_nonlocal
1556 if (HAVE_save_stack_nonlocal)
1557 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1558 else
1559 #endif
1560 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1562 OK_DEFER_POP;
1564 /* Return the address of the result block. */
1565 result = copy_addr_to_reg (XEXP (result, 0));
1566 return convert_memory_address (ptr_mode, result);
1569 /* Perform an untyped return. */
1571 static void
1572 expand_builtin_return (rtx result)
1574 int size, align, regno;
1575 enum machine_mode mode;
1576 rtx reg;
1577 rtx call_fusage = 0;
1579 result = convert_memory_address (Pmode, result);
1581 apply_result_size ();
1582 result = gen_rtx_MEM (BLKmode, result);
1584 #ifdef HAVE_untyped_return
1585 if (HAVE_untyped_return)
1587 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1588 emit_barrier ();
1589 return;
1591 #endif
1593 /* Restore the return value and note that each value is used. */
1594 size = 0;
1595 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1596 if ((mode = apply_result_mode[regno]) != VOIDmode)
1598 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1599 if (size % align != 0)
1600 size = CEIL (size, align) * align;
1601 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1602 emit_move_insn (reg, adjust_address (result, mode, size));
1604 push_to_sequence (call_fusage);
1605 emit_use (reg);
1606 call_fusage = get_insns ();
1607 end_sequence ();
1608 size += GET_MODE_SIZE (mode);
1611 /* Put the USE insns before the return. */
1612 emit_insn (call_fusage);
1614 /* Return whatever value was restored by jumping directly to the end
1615 of the function. */
1616 expand_naked_return ();
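/* The classic use of the three builtins expanded above: forward the
   current (unknown) arguments to another function and hand back its
   result.  The size 64 is an illustrative upper bound on the argument
   block, and target_function is a stand-in name.  */
#if 0
extern void target_function ();

void
forward_call ()
{
  void *args   = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) target_function, args, 64);
  __builtin_return (result);
}
#endif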
1619 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1621 static enum type_class
1622 type_to_class (tree type)
1624 switch (TREE_CODE (type))
1626 case VOID_TYPE: return void_type_class;
1627 case INTEGER_TYPE: return integer_type_class;
1628 case ENUMERAL_TYPE: return enumeral_type_class;
1629 case BOOLEAN_TYPE: return boolean_type_class;
1630 case POINTER_TYPE: return pointer_type_class;
1631 case REFERENCE_TYPE: return reference_type_class;
1632 case OFFSET_TYPE: return offset_type_class;
1633 case REAL_TYPE: return real_type_class;
1634 case COMPLEX_TYPE: return complex_type_class;
1635 case FUNCTION_TYPE: return function_type_class;
1636 case METHOD_TYPE: return method_type_class;
1637 case RECORD_TYPE: return record_type_class;
1638 case UNION_TYPE:
1639 case QUAL_UNION_TYPE: return union_type_class;
1640 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1641 ? string_type_class : array_type_class);
1642 case LANG_TYPE: return lang_type_class;
1643 default: return no_type_class;
1647 /* Expand a call EXP to __builtin_classify_type. */
1649 static rtx
1650 expand_builtin_classify_type (tree exp)
1652 if (call_expr_nargs (exp))
1653 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1654 return GEN_INT (no_type_class);
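/* A worked illustration of the mapping above (not part of the original
   sources): with the code as written, __builtin_classify_type (1.0)
   evaluates to the constant real_type_class, while a call with no
   arguments yields no_type_class.  */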
1657 /* This helper macro, meant to be used in mathfn_built_in below,
1658 determines which among a set of three builtin math functions is
1659 appropriate for a given type mode. The `F' and `L' cases are
1660 automatically generated from the `double' case. */
1661 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1662 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1663 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1664 fcodel = BUILT_IN_MATHFN##L ; break;
1665 /* Similar to above, but appends _R after any F/L suffix. */
1666 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1667 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1668 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1669 fcodel = BUILT_IN_MATHFN##L_R ; break;
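/* For illustration (not additional source text), CASE_MATHFN (BUILT_IN_SIN)
   expands to:

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so one CASE_MATHFN line below covers the double, float and long double
   entry points of a math builtin.  */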
1671 /* Return the mathematical function equivalent to FN but operating directly
1672 on TYPE, if available. If IMPLICIT is true find the function in
1673 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1674 can't do the conversion, return zero. */
1676 static tree
1677 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1679 tree const *const fn_arr
1680 = implicit ? implicit_built_in_decls : built_in_decls;
1681 enum built_in_function fcode, fcodef, fcodel;
1683 switch (fn)
1685 CASE_MATHFN (BUILT_IN_ACOS)
1686 CASE_MATHFN (BUILT_IN_ACOSH)
1687 CASE_MATHFN (BUILT_IN_ASIN)
1688 CASE_MATHFN (BUILT_IN_ASINH)
1689 CASE_MATHFN (BUILT_IN_ATAN)
1690 CASE_MATHFN (BUILT_IN_ATAN2)
1691 CASE_MATHFN (BUILT_IN_ATANH)
1692 CASE_MATHFN (BUILT_IN_CBRT)
1693 CASE_MATHFN (BUILT_IN_CEIL)
1694 CASE_MATHFN (BUILT_IN_CEXPI)
1695 CASE_MATHFN (BUILT_IN_COPYSIGN)
1696 CASE_MATHFN (BUILT_IN_COS)
1697 CASE_MATHFN (BUILT_IN_COSH)
1698 CASE_MATHFN (BUILT_IN_DREM)
1699 CASE_MATHFN (BUILT_IN_ERF)
1700 CASE_MATHFN (BUILT_IN_ERFC)
1701 CASE_MATHFN (BUILT_IN_EXP)
1702 CASE_MATHFN (BUILT_IN_EXP10)
1703 CASE_MATHFN (BUILT_IN_EXP2)
1704 CASE_MATHFN (BUILT_IN_EXPM1)
1705 CASE_MATHFN (BUILT_IN_FABS)
1706 CASE_MATHFN (BUILT_IN_FDIM)
1707 CASE_MATHFN (BUILT_IN_FLOOR)
1708 CASE_MATHFN (BUILT_IN_FMA)
1709 CASE_MATHFN (BUILT_IN_FMAX)
1710 CASE_MATHFN (BUILT_IN_FMIN)
1711 CASE_MATHFN (BUILT_IN_FMOD)
1712 CASE_MATHFN (BUILT_IN_FREXP)
1713 CASE_MATHFN (BUILT_IN_GAMMA)
1714 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1715 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1716 CASE_MATHFN (BUILT_IN_HYPOT)
1717 CASE_MATHFN (BUILT_IN_ILOGB)
1718 CASE_MATHFN (BUILT_IN_INF)
1719 CASE_MATHFN (BUILT_IN_ISINF)
1720 CASE_MATHFN (BUILT_IN_J0)
1721 CASE_MATHFN (BUILT_IN_J1)
1722 CASE_MATHFN (BUILT_IN_JN)
1723 CASE_MATHFN (BUILT_IN_LCEIL)
1724 CASE_MATHFN (BUILT_IN_LDEXP)
1725 CASE_MATHFN (BUILT_IN_LFLOOR)
1726 CASE_MATHFN (BUILT_IN_LGAMMA)
1727 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1728 CASE_MATHFN (BUILT_IN_LLCEIL)
1729 CASE_MATHFN (BUILT_IN_LLFLOOR)
1730 CASE_MATHFN (BUILT_IN_LLRINT)
1731 CASE_MATHFN (BUILT_IN_LLROUND)
1732 CASE_MATHFN (BUILT_IN_LOG)
1733 CASE_MATHFN (BUILT_IN_LOG10)
1734 CASE_MATHFN (BUILT_IN_LOG1P)
1735 CASE_MATHFN (BUILT_IN_LOG2)
1736 CASE_MATHFN (BUILT_IN_LOGB)
1737 CASE_MATHFN (BUILT_IN_LRINT)
1738 CASE_MATHFN (BUILT_IN_LROUND)
1739 CASE_MATHFN (BUILT_IN_MODF)
1740 CASE_MATHFN (BUILT_IN_NAN)
1741 CASE_MATHFN (BUILT_IN_NANS)
1742 CASE_MATHFN (BUILT_IN_NEARBYINT)
1743 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1744 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1745 CASE_MATHFN (BUILT_IN_POW)
1746 CASE_MATHFN (BUILT_IN_POWI)
1747 CASE_MATHFN (BUILT_IN_POW10)
1748 CASE_MATHFN (BUILT_IN_REMAINDER)
1749 CASE_MATHFN (BUILT_IN_REMQUO)
1750 CASE_MATHFN (BUILT_IN_RINT)
1751 CASE_MATHFN (BUILT_IN_ROUND)
1752 CASE_MATHFN (BUILT_IN_SCALB)
1753 CASE_MATHFN (BUILT_IN_SCALBLN)
1754 CASE_MATHFN (BUILT_IN_SCALBN)
1755 CASE_MATHFN (BUILT_IN_SIGNBIT)
1756 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1757 CASE_MATHFN (BUILT_IN_SIN)
1758 CASE_MATHFN (BUILT_IN_SINCOS)
1759 CASE_MATHFN (BUILT_IN_SINH)
1760 CASE_MATHFN (BUILT_IN_SQRT)
1761 CASE_MATHFN (BUILT_IN_TAN)
1762 CASE_MATHFN (BUILT_IN_TANH)
1763 CASE_MATHFN (BUILT_IN_TGAMMA)
1764 CASE_MATHFN (BUILT_IN_TRUNC)
1765 CASE_MATHFN (BUILT_IN_Y0)
1766 CASE_MATHFN (BUILT_IN_Y1)
1767 CASE_MATHFN (BUILT_IN_YN)
1769 default:
1770 return NULL_TREE;
1773 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1774 return fn_arr[fcode];
1775 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1776 return fn_arr[fcodef];
1777 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1778 return fn_arr[fcodel];
1779 else
1780 return NULL_TREE;
1783 /* Like mathfn_built_in_1(), but always use the implicit array. */
1785 tree
1786 mathfn_built_in (tree type, enum built_in_function fn)
1788 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
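/* Usage sketch (illustrative only): a caller wanting the float variant of
   sqrt would write

     tree sqrtfn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which, per the table above, yields the decl normally known as sqrtf, or
   NULL_TREE if the runtime cannot be assumed to provide it.  */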
1791 /* If errno must be maintained, expand the RTL to check if the result,
1792 TARGET, of a built-in function call, EXP, is NaN, and if so set
1793 errno to EDOM. */
1795 static void
1796 expand_errno_check (tree exp, rtx target)
1798 rtx lab = gen_label_rtx ();
1800 /* Test the result; if it is NaN, set errno=EDOM because
1801 the argument was not in the domain. */
1802 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1803 0, lab);
1805 #ifdef TARGET_EDOM
1806 /* If this built-in doesn't throw an exception, set errno directly. */
1807 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1809 #ifdef GEN_ERRNO_RTX
1810 rtx errno_rtx = GEN_ERRNO_RTX;
1811 #else
1812 rtx errno_rtx
1813 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1814 #endif
1815 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1816 emit_label (lab);
1817 return;
1819 #endif
1821 /* Make sure the library call isn't expanded as a tail call. */
1822 CALL_EXPR_TAILCALL (exp) = 0;
1824 /* We can't set errno=EDOM directly; let the library call do it.
1825 Pop the arguments right away in case the call gets deleted. */
1826 NO_DEFER_POP;
1827 expand_call (exp, target, 0);
1828 OK_DEFER_POP;
1829 emit_label (lab);
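/* Roughly, the sequence emitted above corresponds to the following
   source-level sketch for, say, sqrt with -fmath-errno (names are
   illustrative):

     res = sqrt (x);
     if (res != res)        // result is NaN: argument was out of the domain
       errno = EDOM;        // or re-issue the libcall and let it set errno

   The TARGET_EDOM path stores EDOM directly; otherwise the call itself is
   re-expanded so the library sets errno.  */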
1832 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1833 Return NULL_RTX if a normal call should be emitted rather than expanding
1834 the function in-line. EXP is the expression that is a call to the builtin
1835 function; if convenient, the result should be placed in TARGET.
1836 SUBTARGET may be used as the target for computing one of EXP's operands. */
1838 static rtx
1839 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1841 optab builtin_optab;
1842 rtx op0, insns, before_call;
1843 tree fndecl = get_callee_fndecl (exp);
1844 enum machine_mode mode;
1845 bool errno_set = false;
1846 tree arg;
1848 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1849 return NULL_RTX;
1851 arg = CALL_EXPR_ARG (exp, 0);
1853 switch (DECL_FUNCTION_CODE (fndecl))
1855 CASE_FLT_FN (BUILT_IN_SQRT):
1856 errno_set = ! tree_expr_nonnegative_p (arg);
1857 builtin_optab = sqrt_optab;
1858 break;
1859 CASE_FLT_FN (BUILT_IN_EXP):
1860 errno_set = true; builtin_optab = exp_optab; break;
1861 CASE_FLT_FN (BUILT_IN_EXP10):
1862 CASE_FLT_FN (BUILT_IN_POW10):
1863 errno_set = true; builtin_optab = exp10_optab; break;
1864 CASE_FLT_FN (BUILT_IN_EXP2):
1865 errno_set = true; builtin_optab = exp2_optab; break;
1866 CASE_FLT_FN (BUILT_IN_EXPM1):
1867 errno_set = true; builtin_optab = expm1_optab; break;
1868 CASE_FLT_FN (BUILT_IN_LOGB):
1869 errno_set = true; builtin_optab = logb_optab; break;
1870 CASE_FLT_FN (BUILT_IN_LOG):
1871 errno_set = true; builtin_optab = log_optab; break;
1872 CASE_FLT_FN (BUILT_IN_LOG10):
1873 errno_set = true; builtin_optab = log10_optab; break;
1874 CASE_FLT_FN (BUILT_IN_LOG2):
1875 errno_set = true; builtin_optab = log2_optab; break;
1876 CASE_FLT_FN (BUILT_IN_LOG1P):
1877 errno_set = true; builtin_optab = log1p_optab; break;
1878 CASE_FLT_FN (BUILT_IN_ASIN):
1879 builtin_optab = asin_optab; break;
1880 CASE_FLT_FN (BUILT_IN_ACOS):
1881 builtin_optab = acos_optab; break;
1882 CASE_FLT_FN (BUILT_IN_TAN):
1883 builtin_optab = tan_optab; break;
1884 CASE_FLT_FN (BUILT_IN_ATAN):
1885 builtin_optab = atan_optab; break;
1886 CASE_FLT_FN (BUILT_IN_FLOOR):
1887 builtin_optab = floor_optab; break;
1888 CASE_FLT_FN (BUILT_IN_CEIL):
1889 builtin_optab = ceil_optab; break;
1890 CASE_FLT_FN (BUILT_IN_TRUNC):
1891 builtin_optab = btrunc_optab; break;
1892 CASE_FLT_FN (BUILT_IN_ROUND):
1893 builtin_optab = round_optab; break;
1894 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1895 builtin_optab = nearbyint_optab;
1896 if (flag_trapping_math)
1897 break;
1898 /* Else fall through and expand as rint. */
1899 CASE_FLT_FN (BUILT_IN_RINT):
1900 builtin_optab = rint_optab; break;
1901 default:
1902 gcc_unreachable ();
1905 /* Make a suitable register to place result in. */
1906 mode = TYPE_MODE (TREE_TYPE (exp));
1908 if (! flag_errno_math || ! HONOR_NANS (mode))
1909 errno_set = false;
1911 /* Before working hard, check whether the instruction is available. */
1912 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1914 target = gen_reg_rtx (mode);
1916 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1917 need to expand the argument again. This way, we will not perform
1918 side-effects more than once. */
1919 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1921 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1923 start_sequence ();
1925 /* Compute into TARGET.
1926 Set TARGET to wherever the result comes back. */
1927 target = expand_unop (mode, builtin_optab, op0, target, 0);
1929 if (target != 0)
1931 if (errno_set)
1932 expand_errno_check (exp, target);
1934 /* Output the entire sequence. */
1935 insns = get_insns ();
1936 end_sequence ();
1937 emit_insn (insns);
1938 return target;
1941 /* If we were unable to expand via the builtin, stop the sequence
1942 (without outputting the insns) and call to the library function
1943 with the stabilized argument list. */
1944 end_sequence ();
1947 before_call = get_last_insn ();
1949 return expand_call (exp, target, target == const0_rtx);
1952 /* Expand a call to the builtin binary math functions (pow and atan2).
1953 Return NULL_RTX if a normal call should be emitted rather than expanding the
1954 function in-line. EXP is the expression that is a call to the builtin
1955 function; if convenient, the result should be placed in TARGET.
1956 SUBTARGET may be used as the target for computing one of EXP's
1957 operands. */
1959 static rtx
1960 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1962 optab builtin_optab;
1963 rtx op0, op1, insns;
1964 int op1_type = REAL_TYPE;
1965 tree fndecl = get_callee_fndecl (exp);
1966 tree arg0, arg1;
1967 enum machine_mode mode;
1968 bool errno_set = true;
1970 switch (DECL_FUNCTION_CODE (fndecl))
1972 CASE_FLT_FN (BUILT_IN_SCALBN):
1973 CASE_FLT_FN (BUILT_IN_SCALBLN):
1974 CASE_FLT_FN (BUILT_IN_LDEXP):
1975 op1_type = INTEGER_TYPE;
1976 default:
1977 break;
1980 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
1981 return NULL_RTX;
1983 arg0 = CALL_EXPR_ARG (exp, 0);
1984 arg1 = CALL_EXPR_ARG (exp, 1);
1986 switch (DECL_FUNCTION_CODE (fndecl))
1988 CASE_FLT_FN (BUILT_IN_POW):
1989 builtin_optab = pow_optab; break;
1990 CASE_FLT_FN (BUILT_IN_ATAN2):
1991 builtin_optab = atan2_optab; break;
1992 CASE_FLT_FN (BUILT_IN_SCALB):
1993 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
1994 return 0;
1995 builtin_optab = scalb_optab; break;
1996 CASE_FLT_FN (BUILT_IN_SCALBN):
1997 CASE_FLT_FN (BUILT_IN_SCALBLN):
1998 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
1999 return 0;
2000 /* Fall through... */
2001 CASE_FLT_FN (BUILT_IN_LDEXP):
2002 builtin_optab = ldexp_optab; break;
2003 CASE_FLT_FN (BUILT_IN_FMOD):
2004 builtin_optab = fmod_optab; break;
2005 CASE_FLT_FN (BUILT_IN_REMAINDER):
2006 CASE_FLT_FN (BUILT_IN_DREM):
2007 builtin_optab = remainder_optab; break;
2008 default:
2009 gcc_unreachable ();
2012 /* Make a suitable register to place result in. */
2013 mode = TYPE_MODE (TREE_TYPE (exp));
2015 /* Before working hard, check whether the instruction is available. */
2016 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2017 return NULL_RTX;
2019 target = gen_reg_rtx (mode);
2021 if (! flag_errno_math || ! HONOR_NANS (mode))
2022 errno_set = false;
2024 /* Always stabilize the argument list. */
2025 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2026 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2028 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2029 op1 = expand_normal (arg1);
2031 start_sequence ();
2033 /* Compute into TARGET.
2034 Set TARGET to wherever the result comes back. */
2035 target = expand_binop (mode, builtin_optab, op0, op1,
2036 target, 0, OPTAB_DIRECT);
2038 /* If we were unable to expand via the builtin, stop the sequence
2039 (without outputting the insns) and call to the library function
2040 with the stabilized argument list. */
2041 if (target == 0)
2043 end_sequence ();
2044 return expand_call (exp, target, target == const0_rtx);
2047 if (errno_set)
2048 expand_errno_check (exp, target);
2050 /* Output the entire sequence. */
2051 insns = get_insns ();
2052 end_sequence ();
2053 emit_insn (insns);
2055 return target;
2058 /* Expand a call to the builtin sin and cos math functions.
2059 Return NULL_RTX if a normal call should be emitted rather than expanding the
2060 function in-line. EXP is the expression that is a call to the builtin
2061 function; if convenient, the result should be placed in TARGET.
2062 SUBTARGET may be used as the target for computing one of EXP's
2063 operands. */
2065 static rtx
2066 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2068 optab builtin_optab;
2069 rtx op0, insns;
2070 tree fndecl = get_callee_fndecl (exp);
2071 enum machine_mode mode;
2072 tree arg;
2074 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2075 return NULL_RTX;
2077 arg = CALL_EXPR_ARG (exp, 0);
2079 switch (DECL_FUNCTION_CODE (fndecl))
2081 CASE_FLT_FN (BUILT_IN_SIN):
2082 CASE_FLT_FN (BUILT_IN_COS):
2083 builtin_optab = sincos_optab; break;
2084 default:
2085 gcc_unreachable ();
2088 /* Make a suitable register to place result in. */
2089 mode = TYPE_MODE (TREE_TYPE (exp));
2091 /* Check if the sincos insn is available; otherwise fall back
2092 to the sin or cos insn. */
2093 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2094 switch (DECL_FUNCTION_CODE (fndecl))
2096 CASE_FLT_FN (BUILT_IN_SIN):
2097 builtin_optab = sin_optab; break;
2098 CASE_FLT_FN (BUILT_IN_COS):
2099 builtin_optab = cos_optab; break;
2100 default:
2101 gcc_unreachable ();
2104 /* Before working hard, check whether the instruction is available. */
2105 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2107 target = gen_reg_rtx (mode);
2109 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2110 need to expand the argument again. This way, we will not perform
2111 side-effects more than once. */
2112 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2114 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2116 start_sequence ();
2118 /* Compute into TARGET.
2119 Set TARGET to wherever the result comes back. */
2120 if (builtin_optab == sincos_optab)
2122 int result;
2124 switch (DECL_FUNCTION_CODE (fndecl))
2126 CASE_FLT_FN (BUILT_IN_SIN):
2127 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2128 break;
2129 CASE_FLT_FN (BUILT_IN_COS):
2130 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2131 break;
2132 default:
2133 gcc_unreachable ();
2135 gcc_assert (result);
2137 else
2139 target = expand_unop (mode, builtin_optab, op0, target, 0);
2142 if (target != 0)
2144 /* Output the entire sequence. */
2145 insns = get_insns ();
2146 end_sequence ();
2147 emit_insn (insns);
2148 return target;
2151 /* If we were unable to expand via the builtin, stop the sequence
2152 (without outputting the insns) and call to the library function
2153 with the stabilized argument list. */
2154 end_sequence ();
2157 target = expand_call (exp, target, target == const0_rtx);
2159 return target;
2162 /* Expand a call to one of the builtin math functions that operate on
2163 a floating point argument and output an integer result (ilogb, isinf,
2164 isnan, etc).
2165 Return 0 if a normal call should be emitted rather than expanding the
2166 function in-line. EXP is the expression that is a call to the builtin
2167 function; if convenient, the result should be placed in TARGET.
2168 SUBTARGET may be used as the target for computing one of EXP's operands. */
2170 static rtx
2171 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2173 optab builtin_optab = 0;
2174 enum insn_code icode = CODE_FOR_nothing;
2175 rtx op0;
2176 tree fndecl = get_callee_fndecl (exp);
2177 enum machine_mode mode;
2178 bool errno_set = false;
2179 tree arg;
2181 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2182 return NULL_RTX;
2184 arg = CALL_EXPR_ARG (exp, 0);
2186 switch (DECL_FUNCTION_CODE (fndecl))
2188 CASE_FLT_FN (BUILT_IN_ILOGB):
2189 errno_set = true; builtin_optab = ilogb_optab; break;
2190 CASE_FLT_FN (BUILT_IN_ISINF):
2191 builtin_optab = isinf_optab; break;
2192 case BUILT_IN_ISNORMAL:
2193 case BUILT_IN_ISFINITE:
2194 CASE_FLT_FN (BUILT_IN_FINITE):
2195 /* These builtins have no optabs (yet). */
2196 break;
2197 default:
2198 gcc_unreachable ();
2201 /* There's no easy way to detect the case where we need to set EDOM. */
2202 if (flag_errno_math && errno_set)
2203 return NULL_RTX;
2205 /* Optab mode depends on the mode of the input argument. */
2206 mode = TYPE_MODE (TREE_TYPE (arg));
2208 if (builtin_optab)
2209 icode = optab_handler (builtin_optab, mode)->insn_code;
2211 /* Before working hard, check whether the instruction is available. */
2212 if (icode != CODE_FOR_nothing)
2214 /* Make a suitable register to place result in. */
2215 if (!target
2216 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2217 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2219 gcc_assert (insn_data[icode].operand[0].predicate
2220 (target, GET_MODE (target)));
2222 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2223 need to expand the argument again. This way, we will not perform
2224 side-effects more than once. */
2225 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2227 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2229 if (mode != GET_MODE (op0))
2230 op0 = convert_to_mode (mode, op0, 0);
2232 /* Compute into TARGET.
2233 Set TARGET to wherever the result comes back. */
2234 emit_unop_insn (icode, target, op0, UNKNOWN);
2235 return target;
2238 /* If there is no optab, try generic code. */
2239 switch (DECL_FUNCTION_CODE (fndecl))
2241 tree result;
2243 CASE_FLT_FN (BUILT_IN_ISINF):
2245 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2246 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2247 tree const type = TREE_TYPE (arg);
2248 REAL_VALUE_TYPE r;
2249 char buf[128];
2251 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2252 real_from_string (&r, buf);
2253 result = build_call_expr (isgr_fn, 2,
2254 fold_build1 (ABS_EXPR, type, arg),
2255 build_real (type, r));
2256 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2258 CASE_FLT_FN (BUILT_IN_FINITE):
2259 case BUILT_IN_ISFINITE:
2261 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2262 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2263 tree const type = TREE_TYPE (arg);
2264 REAL_VALUE_TYPE r;
2265 char buf[128];
2267 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2268 real_from_string (&r, buf);
2269 result = build_call_expr (isle_fn, 2,
2270 fold_build1 (ABS_EXPR, type, arg),
2271 build_real (type, r));
2272 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2274 case BUILT_IN_ISNORMAL:
2276 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2277 islessequal(fabs(x),DBL_MAX). */
2278 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2279 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2280 tree const type = TREE_TYPE (arg);
2281 REAL_VALUE_TYPE rmax, rmin;
2282 char buf[128];
2284 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2285 real_from_string (&rmax, buf);
2286 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2287 real_from_string (&rmin, buf);
2288 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2289 result = build_call_expr (isle_fn, 2, arg,
2290 build_real (type, rmax));
2291 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2292 build_call_expr (isge_fn, 2, arg,
2293 build_real (type, rmin)));
2294 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2296 default:
2297 break;
2300 target = expand_call (exp, target, target == const0_rtx);
2302 return target;
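/* Summary of the generic lowerings used above when no optab exists
   (illustrative; the bounds are the per-mode equivalents of DBL_MAX and the
   smallest normal value, built via get_max_float and 0x1p<emin-1>):

     isinf (x)    -> isgreater (fabs (x), MAX)
     isfinite (x) -> islessequal (fabs (x), MAX)
     isnormal (x) -> islessequal (fabs (x), MAX)
                     & isgreaterequal (fabs (x), MIN)  */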
2305 /* Expand a call to the builtin sincos math function.
2306 Return NULL_RTX if a normal call should be emitted rather than expanding the
2307 function in-line. EXP is the expression that is a call to the builtin
2308 function. */
2310 static rtx
2311 expand_builtin_sincos (tree exp)
2313 rtx op0, op1, op2, target1, target2;
2314 enum machine_mode mode;
2315 tree arg, sinp, cosp;
2316 int result;
2318 if (!validate_arglist (exp, REAL_TYPE,
2319 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2320 return NULL_RTX;
2322 arg = CALL_EXPR_ARG (exp, 0);
2323 sinp = CALL_EXPR_ARG (exp, 1);
2324 cosp = CALL_EXPR_ARG (exp, 2);
2326 /* Make a suitable register to place result in. */
2327 mode = TYPE_MODE (TREE_TYPE (arg));
2329 /* Check if sincos insn is available, otherwise emit the call. */
2330 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2331 return NULL_RTX;
2333 target1 = gen_reg_rtx (mode);
2334 target2 = gen_reg_rtx (mode);
2336 op0 = expand_normal (arg);
2337 op1 = expand_normal (build_fold_indirect_ref (sinp));
2338 op2 = expand_normal (build_fold_indirect_ref (cosp));
2340 /* Compute into target1 and target2.
2341 Set TARGET to wherever the result comes back. */
2342 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2343 gcc_assert (result);
2345 /* Move target1 and target2 to the memory locations indicated
2346 by op1 and op2. */
2347 emit_move_insn (op1, target1);
2348 emit_move_insn (op2, target2);
2350 return const0_rtx;
2353 /* Expand a call to the internal cexpi builtin to the sincos math function.
2354 EXP is the expression that is a call to the builtin function; if convenient,
2355 the result should be placed in TARGET. SUBTARGET may be used as the target
2356 for computing one of EXP's operands. */
2358 static rtx
2359 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2361 tree fndecl = get_callee_fndecl (exp);
2362 tree arg, type;
2363 enum machine_mode mode;
2364 rtx op0, op1, op2;
2366 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2367 return NULL_RTX;
2369 arg = CALL_EXPR_ARG (exp, 0);
2370 type = TREE_TYPE (arg);
2371 mode = TYPE_MODE (TREE_TYPE (arg));
2373 /* Try expanding via a sincos optab, fall back to emitting a libcall
2374 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2375 is only generated from sincos or cexp, or when either of them is available. */
2376 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2378 op1 = gen_reg_rtx (mode);
2379 op2 = gen_reg_rtx (mode);
2381 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2383 /* Compute into op1 and op2. */
2384 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2386 else if (TARGET_HAS_SINCOS)
2388 tree call, fn = NULL_TREE;
2389 tree top1, top2;
2390 rtx op1a, op2a;
2392 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2393 fn = built_in_decls[BUILT_IN_SINCOSF];
2394 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2395 fn = built_in_decls[BUILT_IN_SINCOS];
2396 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2397 fn = built_in_decls[BUILT_IN_SINCOSL];
2398 else
2399 gcc_unreachable ();
2401 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2402 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2403 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2404 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2405 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2406 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2408 /* Make sure not to fold the sincos call again. */
2409 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2410 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2411 call, 3, arg, top1, top2));
2413 else
2415 tree call, fn = NULL_TREE, narg;
2416 tree ctype = build_complex_type (type);
2418 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2419 fn = built_in_decls[BUILT_IN_CEXPF];
2420 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2421 fn = built_in_decls[BUILT_IN_CEXP];
2422 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2423 fn = built_in_decls[BUILT_IN_CEXPL];
2424 else
2425 gcc_unreachable ();
2427 /* If we don't have a decl for cexp, create one. This is the
2428 friendliest fallback if the user calls __builtin_cexpi
2429 on a target without full C99 function support. */
2430 if (fn == NULL_TREE)
2432 tree fntype;
2433 const char *name = NULL;
2435 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2436 name = "cexpf";
2437 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2438 name = "cexp";
2439 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2440 name = "cexpl";
2442 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2443 fn = build_fn_decl (name, fntype);
2446 narg = fold_build2 (COMPLEX_EXPR, ctype,
2447 build_real (type, dconst0), arg);
2449 /* Make sure not to fold the cexp call again. */
2450 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2451 return expand_expr (build_call_nary (ctype, call, 1, narg),
2452 target, VOIDmode, EXPAND_NORMAL);
2455 /* Now build the proper return type. */
2456 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2457 make_tree (TREE_TYPE (arg), op2),
2458 make_tree (TREE_TYPE (arg), op1)),
2459 target, VOIDmode, EXPAND_NORMAL);
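/* The three strategies above, written at the source level (illustrative
   sketch; cexpi computes cos(x) + I*sin(x)):

     __builtin_cexpi (x)
       -> one sincos<mode>3 insn                    (optab available)
       -> sincos (x, &s, &c); result = c + I*s      (TARGET_HAS_SINCOS)
       -> cexp (0.0 + x*I)                          (fallback libcall)  */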
2462 /* Expand a call to one of the builtin rounding functions gcc defines
2463 as an extension (lfloor and lceil). As these are gcc extensions we
2464 do not need to worry about setting errno to EDOM.
2465 If expanding via optab fails, lower the expression to (int)(floor(x)).
2466 EXP is the expression that is a call to the builtin function;
2467 if convenient, the result should be placed in TARGET. SUBTARGET may
2468 be used as the target for computing one of EXP's operands. */
2470 static rtx
2471 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2473 convert_optab builtin_optab;
2474 rtx op0, insns, tmp;
2475 tree fndecl = get_callee_fndecl (exp);
2476 enum built_in_function fallback_fn;
2477 tree fallback_fndecl;
2478 enum machine_mode mode;
2479 tree arg;
2481 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2482 gcc_unreachable ();
2484 arg = CALL_EXPR_ARG (exp, 0);
2486 switch (DECL_FUNCTION_CODE (fndecl))
2488 CASE_FLT_FN (BUILT_IN_LCEIL):
2489 CASE_FLT_FN (BUILT_IN_LLCEIL):
2490 builtin_optab = lceil_optab;
2491 fallback_fn = BUILT_IN_CEIL;
2492 break;
2494 CASE_FLT_FN (BUILT_IN_LFLOOR):
2495 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2496 builtin_optab = lfloor_optab;
2497 fallback_fn = BUILT_IN_FLOOR;
2498 break;
2500 default:
2501 gcc_unreachable ();
2504 /* Make a suitable register to place result in. */
2505 mode = TYPE_MODE (TREE_TYPE (exp));
2507 target = gen_reg_rtx (mode);
2509 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2510 need to expand the argument again. This way, we will not perform
2511 side-effects more than once. */
2512 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2514 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2516 start_sequence ();
2518 /* Compute into TARGET. */
2519 if (expand_sfix_optab (target, op0, builtin_optab))
2521 /* Output the entire sequence. */
2522 insns = get_insns ();
2523 end_sequence ();
2524 emit_insn (insns);
2525 return target;
2528 /* If we were unable to expand via the builtin, stop the sequence
2529 (without outputting the insns). */
2530 end_sequence ();
2532 /* Fall back to floating point rounding optab. */
2533 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2535 /* For non-C99 targets we may end up without a fallback fndecl here
2536 if the user called __builtin_lfloor directly. In this case emit
2537 a call to the floor/ceil variants nevertheless. This should result
2538 in the best user experience for targets without full C99 support. */
2539 if (fallback_fndecl == NULL_TREE)
2541 tree fntype;
2542 const char *name = NULL;
2544 switch (DECL_FUNCTION_CODE (fndecl))
2546 case BUILT_IN_LCEIL:
2547 case BUILT_IN_LLCEIL:
2548 name = "ceil";
2549 break;
2550 case BUILT_IN_LCEILF:
2551 case BUILT_IN_LLCEILF:
2552 name = "ceilf";
2553 break;
2554 case BUILT_IN_LCEILL:
2555 case BUILT_IN_LLCEILL:
2556 name = "ceill";
2557 break;
2558 case BUILT_IN_LFLOOR:
2559 case BUILT_IN_LLFLOOR:
2560 name = "floor";
2561 break;
2562 case BUILT_IN_LFLOORF:
2563 case BUILT_IN_LLFLOORF:
2564 name = "floorf";
2565 break;
2566 case BUILT_IN_LFLOORL:
2567 case BUILT_IN_LLFLOORL:
2568 name = "floorl";
2569 break;
2570 default:
2571 gcc_unreachable ();
2574 fntype = build_function_type_list (TREE_TYPE (arg),
2575 TREE_TYPE (arg), NULL_TREE);
2576 fallback_fndecl = build_fn_decl (name, fntype);
2579 exp = build_call_expr (fallback_fndecl, 1, arg);
2581 tmp = expand_normal (exp);
2583 /* Truncate the result of floating point optab to integer
2584 via expand_fix (). */
2585 target = gen_reg_rtx (mode);
2586 expand_fix (target, tmp, 0);
2588 return target;
2591 /* Expand a call to one of the builtin math functions doing integer
2592 conversion (lrint).
2593 Return 0 if a normal call should be emitted rather than expanding the
2594 function in-line. EXP is the expression that is a call to the builtin
2595 function; if convenient, the result should be placed in TARGET.
2596 SUBTARGET may be used as the target for computing one of EXP's operands. */
2598 static rtx
2599 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2601 convert_optab builtin_optab;
2602 rtx op0, insns;
2603 tree fndecl = get_callee_fndecl (exp);
2604 tree arg;
2605 enum machine_mode mode;
2607 /* There's no easy way to detect the case where we need to set EDOM. */
2608 if (flag_errno_math)
2609 return NULL_RTX;
2611 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2612 gcc_unreachable ();
2614 arg = CALL_EXPR_ARG (exp, 0);
2616 switch (DECL_FUNCTION_CODE (fndecl))
2618 CASE_FLT_FN (BUILT_IN_LRINT):
2619 CASE_FLT_FN (BUILT_IN_LLRINT):
2620 builtin_optab = lrint_optab; break;
2621 CASE_FLT_FN (BUILT_IN_LROUND):
2622 CASE_FLT_FN (BUILT_IN_LLROUND):
2623 builtin_optab = lround_optab; break;
2624 default:
2625 gcc_unreachable ();
2628 /* Make a suitable register to place result in. */
2629 mode = TYPE_MODE (TREE_TYPE (exp));
2631 target = gen_reg_rtx (mode);
2633 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2634 need to expand the argument again. This way, we will not perform
2635 side-effects more than once. */
2636 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2638 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2640 start_sequence ();
2642 if (expand_sfix_optab (target, op0, builtin_optab))
2644 /* Output the entire sequence. */
2645 insns = get_insns ();
2646 end_sequence ();
2647 emit_insn (insns);
2648 return target;
2651 /* If we were unable to expand via the builtin, stop the sequence
2652 (without outputting the insns) and call to the library function
2653 with the stabilized argument list. */
2654 end_sequence ();
2656 target = expand_call (exp, target, target == const0_rtx);
2658 return target;
2661 /* To evaluate powi(x,n), the floating point value x raised to the
2662 constant integer exponent n, we use a hybrid algorithm that
2663 combines the "window method" with look-up tables. For an
2664 introduction to exponentiation algorithms and "addition chains",
2665 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2666 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2667 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2668 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2670 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2671 multiplications to inline before calling the system library's pow
2672 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2673 so this default never requires calling pow, powf or powl. */
2675 #ifndef POWI_MAX_MULTS
2676 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2677 #endif
2679 /* The size of the "optimal power tree" lookup table. All
2680 exponents less than this value are simply looked up in the
2681 powi_table below. This threshold is also used to size the
2682 cache of pseudo registers that hold intermediate results. */
2683 #define POWI_TABLE_SIZE 256
2685 /* The size, in bits of the window, used in the "window method"
2686 exponentiation algorithm. This is equivalent to a radix of
2687 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2688 #define POWI_WINDOW_SIZE 3
2690 /* The following table is an efficient representation of an
2691 "optimal power tree". For each value, i, the corresponding
2692 value, j, in the table states that an optimal evaluation
2693 sequence for calculating pow(x,i) can be found by evaluating
2694 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2695 100 integers is given in Knuth's "Seminumerical algorithms". */
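/* Worked example (illustrative): powi_table[15] == 9, so x**15 is computed
   as x**9 * x**6, with x**9 = x**6 * x**3, x**6 = x**3 * x**3 and
   x**3 = x**2 * x.  Because shared subresults are cached by expand_powi_1
   below, this takes 5 multiplications instead of the 6 needed by the plain
   binary method.  */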
2697 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2699 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2700 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2701 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2702 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2703 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2704 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2705 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2706 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2707 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2708 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2709 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2710 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2711 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2712 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2713 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2714 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2715 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2716 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2717 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2718 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2719 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2720 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2721 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2722 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2723 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2724 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2725 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2726 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2727 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2728 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2729 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2730 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2734 /* Return the number of multiplications required to calculate
2735 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2736 subroutine of powi_cost. CACHE is an array indicating
2737 which exponents have already been calculated. */
2739 static int
2740 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2742 /* If we've already calculated this exponent, then this evaluation
2743 doesn't require any additional multiplications. */
2744 if (cache[n])
2745 return 0;
2747 cache[n] = true;
2748 return powi_lookup_cost (n - powi_table[n], cache)
2749 + powi_lookup_cost (powi_table[n], cache) + 1;
2752 /* Return the number of multiplications required to calculate
2753 powi(x,n) for an arbitrary x, given the exponent N. This
2754 function needs to be kept in sync with expand_powi below. */
2756 static int
2757 powi_cost (HOST_WIDE_INT n)
2759 bool cache[POWI_TABLE_SIZE];
2760 unsigned HOST_WIDE_INT digit;
2761 unsigned HOST_WIDE_INT val;
2762 int result;
2764 if (n == 0)
2765 return 0;
2767 /* Ignore the reciprocal when calculating the cost. */
2768 val = (n < 0) ? -n : n;
2770 /* Initialize the exponent cache. */
2771 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2772 cache[1] = true;
2774 result = 0;
2776 while (val >= POWI_TABLE_SIZE)
2778 if (val & 1)
2780 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2781 result += powi_lookup_cost (digit, cache)
2782 + POWI_WINDOW_SIZE + 1;
2783 val >>= POWI_WINDOW_SIZE;
2785 else
2787 val >>= 1;
2788 result++;
2792 return result + powi_lookup_cost (val, cache);
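/* Illustrative sketch (kept out of the build on purpose): a plain C
   rendering of the decomposition that powi_cost above and expand_powi_1
   below apply, reusing powi_table, POWI_TABLE_SIZE and POWI_WINDOW_SIZE.
   Memoization of shared subresults is omitted, so this shows the shape of
   the recursion rather than the exact multiplication count.  */
#if 0
static double
powi_sketch (double x, unsigned HOST_WIDE_INT n)
{
  if (n == 0)
    return 1.0;
  if (n == 1)
    return x;
  if (n < POWI_TABLE_SIZE)
    /* Optimal power tree: x**n = x**(n - j) * x**j with j = powi_table[n].  */
    return (powi_sketch (x, n - powi_table[n])
            * powi_sketch (x, powi_table[n]));
  if (n & 1)
    {
      /* Window method: peel off the low POWI_WINDOW_SIZE bits.  */
      unsigned HOST_WIDE_INT digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      return powi_sketch (x, n - digit) * powi_sketch (x, digit);
    }
  else
    {
      /* Even exponent: square the half power.  */
      double half = powi_sketch (x, n >> 1);
      return half * half;
    }
}
#endif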
2795 /* Recursive subroutine of expand_powi. This function takes the array,
2796 CACHE, of already calculated exponents and an exponent N and returns
2797 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2799 static rtx
2800 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2802 unsigned HOST_WIDE_INT digit;
2803 rtx target, result;
2804 rtx op0, op1;
2806 if (n < POWI_TABLE_SIZE)
2808 if (cache[n])
2809 return cache[n];
2811 target = gen_reg_rtx (mode);
2812 cache[n] = target;
2814 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2815 op1 = expand_powi_1 (mode, powi_table[n], cache);
2817 else if (n & 1)
2819 target = gen_reg_rtx (mode);
2820 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2821 op0 = expand_powi_1 (mode, n - digit, cache);
2822 op1 = expand_powi_1 (mode, digit, cache);
2824 else
2826 target = gen_reg_rtx (mode);
2827 op0 = expand_powi_1 (mode, n >> 1, cache);
2828 op1 = op0;
2831 result = expand_mult (mode, op0, op1, target, 0);
2832 if (result != target)
2833 emit_move_insn (target, result);
2834 return target;
2837 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2838 floating point operand in mode MODE, and N is the exponent. This
2839 function needs to be kept in sync with powi_cost above. */
2841 static rtx
2842 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2844 unsigned HOST_WIDE_INT val;
2845 rtx cache[POWI_TABLE_SIZE];
2846 rtx result;
2848 if (n == 0)
2849 return CONST1_RTX (mode);
2851 val = (n < 0) ? -n : n;
2853 memset (cache, 0, sizeof (cache));
2854 cache[1] = x;
2856 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2858 /* If the original exponent was negative, reciprocate the result. */
2859 if (n < 0)
2860 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2861 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2863 return result;
2866 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2867 a normal call should be emitted rather than expanding the function
2868 in-line. EXP is the expression that is a call to the builtin
2869 function; if convenient, the result should be placed in TARGET. */
2871 static rtx
2872 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2874 tree arg0, arg1;
2875 tree fn, narg0;
2876 tree type = TREE_TYPE (exp);
2877 REAL_VALUE_TYPE cint, c, c2;
2878 HOST_WIDE_INT n;
2879 rtx op, op2;
2880 enum machine_mode mode = TYPE_MODE (type);
2882 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2883 return NULL_RTX;
2885 arg0 = CALL_EXPR_ARG (exp, 0);
2886 arg1 = CALL_EXPR_ARG (exp, 1);
2888 if (TREE_CODE (arg1) != REAL_CST
2889 || TREE_OVERFLOW (arg1))
2890 return expand_builtin_mathfn_2 (exp, target, subtarget);
2892 /* Handle constant exponents. */
2894 /* For integer valued exponents we can expand to an optimal multiplication
2895 sequence using expand_powi. */
2896 c = TREE_REAL_CST (arg1);
2897 n = real_to_integer (&c);
2898 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2899 if (real_identical (&c, &cint)
2900 && ((n >= -1 && n <= 2)
2901 || (flag_unsafe_math_optimizations
2902 && !optimize_size
2903 && powi_cost (n) <= POWI_MAX_MULTS)))
2905 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2906 if (n != 1)
2908 op = force_reg (mode, op);
2909 op = expand_powi (op, mode, n);
2911 return op;
2914 narg0 = builtin_save_expr (arg0);
2916 /* If the exponent is not integer valued, check if it is half of an integer.
2917 In this case we can expand to sqrt (x) * x**(n/2). */
2918 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2919 if (fn != NULL_TREE)
2921 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2922 n = real_to_integer (&c2);
2923 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2924 if (real_identical (&c2, &cint)
2925 && ((flag_unsafe_math_optimizations
2926 && !optimize_size
2927 && powi_cost (n/2) <= POWI_MAX_MULTS)
2928 || n == 1))
2930 tree call_expr = build_call_expr (fn, 1, narg0);
2931 /* Use expand_expr in case the newly built call expression
2932 was folded to a non-call. */
2933 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2934 if (n != 1)
2936 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2937 op2 = force_reg (mode, op2);
2938 op2 = expand_powi (op2, mode, abs (n / 2));
2939 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2940 0, OPTAB_LIB_WIDEN);
2941 /* If the original exponent was negative, reciprocate the
2942 result. */
2943 if (n < 0)
2944 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2945 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2947 return op;
2951 /* Check whether the exponent is a third of an integer. In this case
2952 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2953 different from pow (x, 1./3.) due to rounding and behavior
2954 with negative x, we need to constrain this transformation to
2955 unsafe math and positive x or finite math. */
2956 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2957 if (fn != NULL_TREE
2958 && flag_unsafe_math_optimizations
2959 && (tree_expr_nonnegative_p (arg0)
2960 || !HONOR_NANS (mode)))
2962 REAL_VALUE_TYPE dconst3;
2963 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
2964 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2965 real_round (&c2, mode, &c2);
2966 n = real_to_integer (&c2);
2967 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2968 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2969 real_convert (&c2, mode, &c2);
2970 if (real_identical (&c2, &c)
2971 && ((!optimize_size
2972 && powi_cost (n/3) <= POWI_MAX_MULTS)
2973 || n == 1))
2975 tree call_expr = build_call_expr (fn, 1, narg0);
2976 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2977 if (abs (n) % 3 == 2)
2978 op = expand_simple_binop (mode, MULT, op, op, op,
2979 0, OPTAB_LIB_WIDEN);
2980 if (n != 1)
2982 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2983 op2 = force_reg (mode, op2);
2984 op2 = expand_powi (op2, mode, abs (n / 3));
2985 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2986 0, OPTAB_LIB_WIDEN);
2987 /* If the original exponent was negative, reciprocate the
2988 result. */
2989 if (n < 0)
2990 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2991 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2993 return op;
2997 /* Fall back to optab expansion. */
2998 return expand_builtin_mathfn_2 (exp, target, subtarget);
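/* Examples of the expansions chosen above (illustrative; the
   flag_unsafe_math_optimizations and !optimize_size guards still apply
   where the code requires them):

     pow (x, 2.0)    ->  x * x
     pow (x, -1.0)   ->  1.0 / x
     pow (x, 0.5)    ->  sqrt (x)
     pow (x, 2.5)    ->  sqrt (x) * x * x       (half of an integer)
     pow (x, 1./3.)  ->  cbrt (x)               (third of an integer)  */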
3001 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3002 a normal call should be emitted rather than expanding the function
3003 in-line. EXP is the expression that is a call to the builtin
3004 function; if convenient, the result should be placed in TARGET. */
3006 static rtx
3007 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3009 tree arg0, arg1;
3010 rtx op0, op1;
3011 enum machine_mode mode;
3012 enum machine_mode mode2;
3014 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3015 return NULL_RTX;
3017 arg0 = CALL_EXPR_ARG (exp, 0);
3018 arg1 = CALL_EXPR_ARG (exp, 1);
3019 mode = TYPE_MODE (TREE_TYPE (exp));
3021 /* Handle constant power. */
3023 if (TREE_CODE (arg1) == INTEGER_CST
3024 && !TREE_OVERFLOW (arg1))
3026 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3028 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3029 Otherwise, check the number of multiplications required. */
3030 if ((TREE_INT_CST_HIGH (arg1) == 0
3031 || TREE_INT_CST_HIGH (arg1) == -1)
3032 && ((n >= -1 && n <= 2)
3033 || (! optimize_size
3034 && powi_cost (n) <= POWI_MAX_MULTS)))
3036 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3037 op0 = force_reg (mode, op0);
3038 return expand_powi (op0, mode, n);
3042 /* Emit a libcall to libgcc. */
3044 /* Mode of the 2nd argument must match that of an int. */
3045 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3047 if (target == NULL_RTX)
3048 target = gen_reg_rtx (mode);
3050 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3051 if (GET_MODE (op0) != mode)
3052 op0 = convert_to_mode (mode, op0, 0);
3053 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3054 if (GET_MODE (op1) != mode2)
3055 op1 = convert_to_mode (mode2, op1, 0);
3057 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3058 target, LCT_CONST, mode, 2,
3059 op0, mode, op1, mode2);
3061 return target;
3064 /* Expand expression EXP which is a call to the strlen builtin. Return
3065 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3066 try to get the result in TARGET, if convenient. */
3068 static rtx
3069 expand_builtin_strlen (tree exp, rtx target,
3070 enum machine_mode target_mode)
3072 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3073 return NULL_RTX;
3074 else
3076 rtx pat;
3077 tree len;
3078 tree src = CALL_EXPR_ARG (exp, 0);
3079 rtx result, src_reg, char_rtx, before_strlen;
3080 enum machine_mode insn_mode = target_mode, char_mode;
3081 enum insn_code icode = CODE_FOR_nothing;
3082 int align;
3084 /* If the length can be computed at compile-time, return it. */
3085 len = c_strlen (src, 0);
3086 if (len)
3087 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3089 /* If the length can be computed at compile-time and is a constant
3090 integer, but there are side-effects in src, evaluate
3091 src for side-effects, then return len.
3092 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3093 can be optimized into: i++; x = 3; */
3094 len = c_strlen (src, 1);
3095 if (len && TREE_CODE (len) == INTEGER_CST)
3097 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3098 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3101 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3103 /* If SRC is not a pointer type, don't do this operation inline. */
3104 if (align == 0)
3105 return NULL_RTX;
3107 /* Bail out if we can't compute strlen in the right mode. */
3108 while (insn_mode != VOIDmode)
3110 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3111 if (icode != CODE_FOR_nothing)
3112 break;
3114 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3116 if (insn_mode == VOIDmode)
3117 return NULL_RTX;
3119 /* Make a place to write the result of the instruction. */
3120 result = target;
3121 if (! (result != 0
3122 && REG_P (result)
3123 && GET_MODE (result) == insn_mode
3124 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3125 result = gen_reg_rtx (insn_mode);
3127 /* Make a place to hold the source address. We will not expand
3128 the actual source until we are sure that the expansion will
3129 not fail -- there are trees that cannot be expanded twice. */
3130 src_reg = gen_reg_rtx (Pmode);
3132 /* Mark the beginning of the strlen sequence so we can emit the
3133 source operand later. */
3134 before_strlen = get_last_insn ();
3136 char_rtx = const0_rtx;
3137 char_mode = insn_data[(int) icode].operand[2].mode;
3138 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3139 char_mode))
3140 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3142 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3143 char_rtx, GEN_INT (align));
3144 if (! pat)
3145 return NULL_RTX;
3146 emit_insn (pat);
3148 /* Now that we are assured of success, expand the source. */
3149 start_sequence ();
3150 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3151 if (pat != src_reg)
3152 emit_move_insn (src_reg, pat);
3153 pat = get_insns ();
3154 end_sequence ();
3156 if (before_strlen)
3157 emit_insn_after (pat, before_strlen);
3158 else
3159 emit_insn_before (pat, get_insns ());
3161 /* Return the value in the proper mode for this function. */
3162 if (GET_MODE (result) == target_mode)
3163 target = result;
3164 else if (target != 0)
3165 convert_move (target, result, 0);
3166 else
3167 target = convert_to_mode (target_mode, result, 0);
3169 return target;
3173 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed; the
3174 caller should emit a normal call. Otherwise try to get the result
3175 in TARGET, if convenient (and in mode MODE if that's convenient). */
3177 static rtx
3178 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3180 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3182 tree type = TREE_TYPE (exp);
3183 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3184 CALL_EXPR_ARG (exp, 1), type);
3185 if (result)
3186 return expand_expr (result, target, mode, EXPAND_NORMAL);
3188 return NULL_RTX;
3191 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3192 caller should emit a normal call. Otherwise try to get the result
3193 in TARGET, if convenient (and in mode MODE if that's convenient). */
3195 static rtx
3196 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3198 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3200 tree type = TREE_TYPE (exp);
3201 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3202 CALL_EXPR_ARG (exp, 1), type);
3203 if (result)
3204 return expand_expr (result, target, mode, EXPAND_NORMAL);
3206 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3208 return NULL_RTX;
3211 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3212 caller should emit a normal call. Otherwise try to get the result
3213 in TARGET, if convenient (and in mode MODE if that's convenient). */
3215 static rtx
3216 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3218 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3220 tree type = TREE_TYPE (exp);
3221 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3222 CALL_EXPR_ARG (exp, 1), type);
3223 if (result)
3224 return expand_expr (result, target, mode, EXPAND_NORMAL);
3226 return NULL_RTX;
3229 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3230 caller should emit a normal call. Otherwise try to get the result
3231 in TARGET, if convenient (and in mode MODE if that's convenient). */
3233 static rtx
3234 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3236 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3238 tree type = TREE_TYPE (exp);
3239 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3240 CALL_EXPR_ARG (exp, 1), type);
3241 if (result)
3242 return expand_expr (result, target, mode, EXPAND_NORMAL);
3244 return NULL_RTX;
3247 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3248 bytes from constant string DATA + OFFSET and return it as target
3249 constant. */
3251 static rtx
3252 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3253 enum machine_mode mode)
3255 const char *str = (const char *) data;
3257 gcc_assert (offset >= 0
3258 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3259 <= strlen (str) + 1));
3261 return c_readstr (str + offset, mode);
3264 /* Expand a call EXP to the memcpy builtin.
3265 Return NULL_RTX if we failed; the caller should emit a normal call.
3266 Otherwise try to get the result in TARGET, if convenient (and in
3267 mode MODE if that's convenient). */
3269 static rtx
3270 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3272 tree fndecl = get_callee_fndecl (exp);
3274 if (!validate_arglist (exp,
3275 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3276 return NULL_RTX;
3277 else
3279 tree dest = CALL_EXPR_ARG (exp, 0);
3280 tree src = CALL_EXPR_ARG (exp, 1);
3281 tree len = CALL_EXPR_ARG (exp, 2);
3282 const char *src_str;
3283 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3284 unsigned int dest_align
3285 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3286 rtx dest_mem, src_mem, dest_addr, len_rtx;
3287 tree result = fold_builtin_memory_op (dest, src, len,
3288 TREE_TYPE (TREE_TYPE (fndecl)),
3289 false, /*endp=*/0);
3290 HOST_WIDE_INT expected_size = -1;
3291 unsigned int expected_align = 0;
3293 if (result)
3295 while (TREE_CODE (result) == COMPOUND_EXPR)
3297 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3298 EXPAND_NORMAL);
3299 result = TREE_OPERAND (result, 1);
3301 return expand_expr (result, target, mode, EXPAND_NORMAL);
3304 /* If DEST is not a pointer type, call the normal function. */
3305 if (dest_align == 0)
3306 return NULL_RTX;
3308 /* If SRC is not a pointer type, don't do this
3309 operation in-line. */
3310 if (src_align == 0)
3311 return NULL_RTX;
3313 stringop_block_profile (exp, &expected_align, &expected_size);
3314 if (expected_align < dest_align)
3315 expected_align = dest_align;
3316 dest_mem = get_memory_rtx (dest, len);
3317 set_mem_align (dest_mem, dest_align);
3318 len_rtx = expand_normal (len);
3319 src_str = c_getstr (src);
3321 /* If SRC is a string constant and block move would be done
3322 by pieces, we can avoid loading the string from memory
3323 and only store the computed constants. */
3324 if (src_str
3325 && GET_CODE (len_rtx) == CONST_INT
3326 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3327 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3328 CONST_CAST (char *, src_str),
3329 dest_align, false))
3331 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3332 builtin_memcpy_read_str,
3333 CONST_CAST (char *, src_str),
3334 dest_align, false, 0);
3335 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3336 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3337 return dest_mem;
3340 src_mem = get_memory_rtx (src, len);
3341 set_mem_align (src_mem, src_align);
3343 /* Copy word part most expediently. */
3344 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3345 CALL_EXPR_TAILCALL (exp)
3346 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3347 expected_align, expected_size);
3349 if (dest_addr == 0)
3351 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3352 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3354 return dest_addr;
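/* For example, a call such as memcpy (buf, "abc", 4) can be handled
   entirely by the store_by_pieces path above when BUF is suitably aligned:
   the four constant bytes are stored directly and the string "abc" is
   never loaded from memory (illustrative).  */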
3358 /* Expand a call EXP to the mempcpy builtin.
3359 Return NULL_RTX if we failed; the caller should emit a normal call.
3360 Otherwise try to get the result in TARGET, if convenient (and in
3361 mode MODE if that's convenient). If ENDP is 0 return the
3362 destination pointer, if ENDP is 1 return the end pointer ala
3363 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3364 stpcpy. */
3366 static rtx
3367 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3369 if (!validate_arglist (exp,
3370 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3371 return NULL_RTX;
3372 else
3374 tree dest = CALL_EXPR_ARG (exp, 0);
3375 tree src = CALL_EXPR_ARG (exp, 1);
3376 tree len = CALL_EXPR_ARG (exp, 2);
3377 return expand_builtin_mempcpy_args (dest, src, len,
3378 TREE_TYPE (exp),
3379 target, mode, /*endp=*/ 1);
3383 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3384 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3385 so that this can also be called without constructing an actual CALL_EXPR.
3386 TYPE is the return type of the call. The other arguments and return value
3387 are the same as for expand_builtin_mempcpy. */
3389 static rtx
3390 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3391 rtx target, enum machine_mode mode, int endp)
3393 /* If return value is ignored, transform mempcpy into memcpy. */
3394 if (target == const0_rtx)
3396 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3398 if (!fn)
3399 return NULL_RTX;
3401 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3402 target, mode, EXPAND_NORMAL);
3404 else
3406 const char *src_str;
3407 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3408 unsigned int dest_align
3409 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3410 rtx dest_mem, src_mem, len_rtx;
3411 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3413 if (result)
3415 while (TREE_CODE (result) == COMPOUND_EXPR)
3417 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3418 EXPAND_NORMAL);
3419 result = TREE_OPERAND (result, 1);
3421 return expand_expr (result, target, mode, EXPAND_NORMAL);
3424 /* If either SRC or DEST is not a pointer type, don't do this
3425 operation in-line. */
3426 if (dest_align == 0 || src_align == 0)
3427 return NULL_RTX;
3429 /* If LEN is not constant, call the normal function. */
3430 if (! host_integerp (len, 1))
3431 return NULL_RTX;
3433 len_rtx = expand_normal (len);
3434 src_str = c_getstr (src);
3436 /* If SRC is a string constant and block move would be done
3437 by pieces, we can avoid loading the string from memory
3438 and store only the computed constants. */
3439 if (src_str
3440 && GET_CODE (len_rtx) == CONST_INT
3441 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3442 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3443 CONST_CAST (char *, src_str),
3444 dest_align, false))
3446 dest_mem = get_memory_rtx (dest, len);
3447 set_mem_align (dest_mem, dest_align);
3448 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3449 builtin_memcpy_read_str,
3450 CONST_CAST (char *, src_str),
3451 dest_align, false, endp);
3452 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3453 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3454 return dest_mem;
3457 if (GET_CODE (len_rtx) == CONST_INT
3458 && can_move_by_pieces (INTVAL (len_rtx),
3459 MIN (dest_align, src_align)))
3461 dest_mem = get_memory_rtx (dest, len);
3462 set_mem_align (dest_mem, dest_align);
3463 src_mem = get_memory_rtx (src, len);
3464 set_mem_align (src_mem, src_align);
3465 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3466 MIN (dest_align, src_align), endp);
3467 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3468 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3469 return dest_mem;
3472 return NULL_RTX;
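/* Illustrative sketch, not part of builtins.c: the ENDP == 1 return value
   at the source level, and why an ignored mempcpy result can degrade to a
   plain memcpy as done above.  */
#include <string.h>

void *
mempcpy_model (void *dest, const void *src, size_t n)
{
  /* ENDP == 1: return the end pointer, i.e. dest + n.  */
  return (char *) memcpy (dest, src, n) + n;
}

void
mempcpy_result_ignored_model (void *dest, const void *src, size_t n)
{
  /* If nothing uses the return value, the "+ n" is dead code, so plain
     memcpy is equivalent.  */
  memcpy (dest, src, n);
}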
3476 /* Expand expression EXP, which is a call to the memmove builtin. Return
3477 NULL_RTX if we failed; the caller should emit a normal call. */
3479 static rtx
3480 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3482 if (!validate_arglist (exp,
3483 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3484 return NULL_RTX;
3485 else
3487 tree dest = CALL_EXPR_ARG (exp, 0);
3488 tree src = CALL_EXPR_ARG (exp, 1);
3489 tree len = CALL_EXPR_ARG (exp, 2);
3490 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3491 target, mode, ignore);
3495 /* Helper function to do the actual work for expand_builtin_memmove. The
3496 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3497 so that this can also be called without constructing an actual CALL_EXPR.
3498 TYPE is the return type of the call. The other arguments and return value
3499 are the same as for expand_builtin_memmove. */
3501 static rtx
3502 expand_builtin_memmove_args (tree dest, tree src, tree len,
3503 tree type, rtx target, enum machine_mode mode,
3504 int ignore)
3506 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3508 if (result)
3510 STRIP_TYPE_NOPS (result);
3511 while (TREE_CODE (result) == COMPOUND_EXPR)
3513 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3514 EXPAND_NORMAL);
3515 result = TREE_OPERAND (result, 1);
3517 return expand_expr (result, target, mode, EXPAND_NORMAL);
3520 /* Otherwise, call the normal function. */
3521 return NULL_RTX;
3524 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3525 NULL_RTX if we failed; the caller should emit a normal call. */
3527 static rtx
3528 expand_builtin_bcopy (tree exp, int ignore)
3530 tree type = TREE_TYPE (exp);
3531 tree src, dest, size;
3533 if (!validate_arglist (exp,
3534 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3535 return NULL_RTX;
3537 src = CALL_EXPR_ARG (exp, 0);
3538 dest = CALL_EXPR_ARG (exp, 1);
3539 size = CALL_EXPR_ARG (exp, 2);
3541 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3542 This is done this way so that if it isn't expanded inline, we fall
3543 back to calling bcopy instead of memmove. */
3544 return expand_builtin_memmove_args (dest, src,
3545 fold_convert (sizetype, size),
3546 type, const0_rtx, VOIDmode,
3547 ignore);
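/* Illustrative sketch, not part of builtins.c: the argument swap performed
   above.  bcopy takes (src, dest, n) while memmove takes (dest, src, n);
   both allow the regions to overlap.  */
#include <string.h>

void
bcopy_model (const void *src, void *dest, size_t n)
{
  memmove (dest, src, n);   /* note the reversed pointer arguments */
}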
3550 #ifndef HAVE_movstr
3551 # define HAVE_movstr 0
3552 # define CODE_FOR_movstr CODE_FOR_nothing
3553 #endif
3555 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3556 we failed; the caller should emit a normal call, otherwise try to
3557 get the result in TARGET, if convenient. If ENDP is 0 return the
3558 destination pointer, if ENDP is 1 return the end pointer ala
3559 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3560 stpcpy. */
3562 static rtx
3563 expand_movstr (tree dest, tree src, rtx target, int endp)
3565 rtx end;
3566 rtx dest_mem;
3567 rtx src_mem;
3568 rtx insn;
3569 const struct insn_data * data;
3571 if (!HAVE_movstr)
3572 return NULL_RTX;
3574 dest_mem = get_memory_rtx (dest, NULL);
3575 src_mem = get_memory_rtx (src, NULL);
3576 if (!endp)
3578 target = force_reg (Pmode, XEXP (dest_mem, 0));
3579 dest_mem = replace_equiv_address (dest_mem, target);
3580 end = gen_reg_rtx (Pmode);
3582 else
3584 if (target == 0 || target == const0_rtx)
3586 end = gen_reg_rtx (Pmode);
3587 if (target == 0)
3588 target = end;
3590 else
3591 end = target;
3594 data = insn_data + CODE_FOR_movstr;
3596 if (data->operand[0].mode != VOIDmode)
3597 end = gen_lowpart (data->operand[0].mode, end);
3599 insn = data->genfun (end, dest_mem, src_mem);
3601 gcc_assert (insn);
3603 emit_insn (insn);
3605 /* movstr is supposed to set end to the address of the NUL
3606 terminator. If the caller requested a mempcpy-like return value,
3607 adjust it. */
3608 if (endp == 1 && target != const0_rtx)
3610 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3611 emit_move_insn (target, force_operand (tem, NULL_RTX));
3614 return target;
3617 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3618 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3619 try to get the result in TARGET, if convenient (and in mode MODE if that's
3620 convenient). */
3622 static rtx
3623 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3625 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3627 tree dest = CALL_EXPR_ARG (exp, 0);
3628 tree src = CALL_EXPR_ARG (exp, 1);
3629 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3631 return NULL_RTX;
3634 /* Helper function to do the actual work for expand_builtin_strcpy. The
3635 arguments to the builtin_strcpy call DEST and SRC are broken out
3636 so that this can also be called without constructing an actual CALL_EXPR.
3637 The other arguments and return value are the same as for
3638 expand_builtin_strcpy. */
3640 static rtx
3641 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3642 rtx target, enum machine_mode mode)
3644 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3645 if (result)
3646 return expand_expr (result, target, mode, EXPAND_NORMAL);
3647 return expand_movstr (dest, src, target, /*endp=*/0);
3651 /* Expand a call EXP to the stpcpy builtin.
3652 Return NULL_RTX if we failed; the caller should emit a normal call,
3653 otherwise try to get the result in TARGET, if convenient (and in
3654 mode MODE if that's convenient). */
3656 static rtx
3657 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3659 tree dst, src;
3661 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3662 return NULL_RTX;
3664 dst = CALL_EXPR_ARG (exp, 0);
3665 src = CALL_EXPR_ARG (exp, 1);
3667 /* If return value is ignored, transform stpcpy into strcpy. */
3668 if (target == const0_rtx)
3670 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3671 if (!fn)
3672 return NULL_RTX;
3674 return expand_expr (build_call_expr (fn, 2, dst, src),
3675 target, mode, EXPAND_NORMAL);
3677 else
3679 tree len, lenp1;
3680 rtx ret;
3682 /* Ensure we get an actual string whose length can be evaluated at
3683 compile-time, not an expression containing a string. This is
3684 because the latter will potentially produce pessimized code
3685 when used to produce the return value. */
3686 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3687 return expand_movstr (dst, src, target, /*endp=*/2);
3689 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3690 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3691 target, mode, /*endp=*/2);
3693 if (ret)
3694 return ret;
3696 if (TREE_CODE (len) == INTEGER_CST)
3698 rtx len_rtx = expand_normal (len);
3700 if (GET_CODE (len_rtx) == CONST_INT)
3702 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3703 dst, src, target, mode);
3705 if (ret)
3707 if (! target)
3709 if (mode != VOIDmode)
3710 target = gen_reg_rtx (mode);
3711 else
3712 target = gen_reg_rtx (GET_MODE (ret));
3714 if (GET_MODE (target) != GET_MODE (ret))
3715 ret = gen_lowpart (GET_MODE (target), ret);
3717 ret = plus_constant (ret, INTVAL (len_rtx));
3718 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3719 gcc_assert (ret);
3721 return target;
3726 return expand_movstr (dst, src, target, /*endp=*/2);
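/* Illustrative sketch, not part of builtins.c: the identity used above.
   With LEN = strlen (src), stpcpy returns a pointer to the terminating NUL
   of the copy, which is mempcpy's end pointer minus one when LEN + 1 bytes
   are copied -- exactly what ENDP == 2 asks for.  */
#include <string.h>

char *
stpcpy_model (char *dest, const char *src)
{
  size_t lenp1 = strlen (src) + 1;                          /* bytes incl. NUL */
  char *end = (char *) memcpy (dest, src, lenp1) + lenp1;   /* mempcpy-style end */
  return end - 1;                                           /* the copied NUL */
}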
3730 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3731 bytes from constant string DATA + OFFSET and return it as target
3732 constant. */
3734 static rtx
3735 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3736 enum machine_mode mode)
3738 const char *str = (const char *) data;
3740 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3741 return const0_rtx;
3743 return c_readstr (str + offset, mode);
3746 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3747 NULL_RTX if we failed; the caller should emit a normal call. */
3749 static rtx
3750 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3752 tree fndecl = get_callee_fndecl (exp);
3754 if (validate_arglist (exp,
3755 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3757 tree dest = CALL_EXPR_ARG (exp, 0);
3758 tree src = CALL_EXPR_ARG (exp, 1);
3759 tree len = CALL_EXPR_ARG (exp, 2);
3760 tree slen = c_strlen (src, 1);
3761 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3763 if (result)
3765 while (TREE_CODE (result) == COMPOUND_EXPR)
3767 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3768 EXPAND_NORMAL);
3769 result = TREE_OPERAND (result, 1);
3771 return expand_expr (result, target, mode, EXPAND_NORMAL);
3774 /* We must be passed a constant len and src parameter. */
3775 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3776 return NULL_RTX;
3778 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3780 /* We're required to pad with trailing zeros if the requested
3781 len is greater than strlen(s2)+1. In that case try to
3782 use store_by_pieces; if that fails, punt. */
3783 if (tree_int_cst_lt (slen, len))
3785 unsigned int dest_align
3786 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3787 const char *p = c_getstr (src);
3788 rtx dest_mem;
3790 if (!p || dest_align == 0 || !host_integerp (len, 1)
3791 || !can_store_by_pieces (tree_low_cst (len, 1),
3792 builtin_strncpy_read_str,
3793 CONST_CAST (char *, p),
3794 dest_align, false))
3795 return NULL_RTX;
3797 dest_mem = get_memory_rtx (dest, len);
3798 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3799 builtin_strncpy_read_str,
3800 CONST_CAST (char *, p), dest_align, false, 0);
3801 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3802 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3803 return dest_mem;
3806 return NULL_RTX;
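/* Illustrative sketch, not part of builtins.c: the padding rule handled
   above.  When LEN exceeds strlen (src), strncpy must fill the rest of the
   destination with zero bytes; that is why the whole destination can come
   from store_by_pieces -- builtin_strncpy_read_str simply returns zero
   chunks once OFFSET passes the end of the constant string.  */
#include <string.h>

void
strncpy_model (char *dest, const char *src, size_t len)
{
  size_t slen = strlen (src);
  size_t i;

  for (i = 0; i < len && i < slen; i++)
    dest[i] = src[i];
  for (; i < len; i++)        /* pad with trailing zeros */
    dest[i] = '\0';
}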
3809 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3810 bytes from constant string DATA + OFFSET and return it as target
3811 constant. */
3813 static rtx
3814 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3815 enum machine_mode mode)
3817 const char *c = (const char *) data;
3818 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3820 memset (p, *c, GET_MODE_SIZE (mode));
3822 return c_readstr (p, mode);
3825 /* Callback routine for store_by_pieces. Return the RTL of a register
3826 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3827 char value given in the RTL register data. For example, if mode is
3828 4 bytes wide, return the RTL for 0x01010101*data. */
3830 static rtx
3831 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3832 enum machine_mode mode)
3834 rtx target, coeff;
3835 size_t size;
3836 char *p;
3838 size = GET_MODE_SIZE (mode);
3839 if (size == 1)
3840 return (rtx) data;
3842 p = XALLOCAVEC (char, size);
3843 memset (p, 1, size);
3844 coeff = c_readstr (p, mode);
3846 target = convert_to_mode (mode, (rtx) data, 1);
3847 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3848 return force_reg (mode, target);
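/* Illustrative sketch, not part of builtins.c: what builtin_memset_gen_str
   computes for a 4-byte mode.  Multiplying the zero-extended fill byte by
   0x01010101 replicates it into every byte of the word, so even a variable
   memset value can be written with word-sized stores.  */
#include <stdint.h>

uint32_t
replicate_fill_byte_model (unsigned char c)
{
  return (uint32_t) c * 0x01010101u;   /* e.g. 0xab -> 0xabababab */
}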
3851 /* Expand expression EXP, which is a call to the memset builtin. Return
3852 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3853 try to get the result in TARGET, if convenient (and in mode MODE if that's
3854 convenient). */
3856 static rtx
3857 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3859 if (!validate_arglist (exp,
3860 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3861 return NULL_RTX;
3862 else
3864 tree dest = CALL_EXPR_ARG (exp, 0);
3865 tree val = CALL_EXPR_ARG (exp, 1);
3866 tree len = CALL_EXPR_ARG (exp, 2);
3867 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3871 /* Helper function to do the actual work for expand_builtin_memset. The
3872 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3873 so that this can also be called without constructing an actual CALL_EXPR.
3874 The other arguments and return value are the same as for
3875 expand_builtin_memset. */
3877 static rtx
3878 expand_builtin_memset_args (tree dest, tree val, tree len,
3879 rtx target, enum machine_mode mode, tree orig_exp)
3881 tree fndecl, fn;
3882 enum built_in_function fcode;
3883 char c;
3884 unsigned int dest_align;
3885 rtx dest_mem, dest_addr, len_rtx;
3886 HOST_WIDE_INT expected_size = -1;
3887 unsigned int expected_align = 0;
3889 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3891 /* If DEST is not a pointer type, don't do this operation in-line. */
3892 if (dest_align == 0)
3893 return NULL_RTX;
3895 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3896 if (expected_align < dest_align)
3897 expected_align = dest_align;
3899 /* If the LEN parameter is zero, return DEST. */
3900 if (integer_zerop (len))
3902 /* Evaluate and ignore VAL in case it has side-effects. */
3903 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3904 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3907 /* Stabilize the arguments in case we fail. */
3908 dest = builtin_save_expr (dest);
3909 val = builtin_save_expr (val);
3910 len = builtin_save_expr (len);
3912 len_rtx = expand_normal (len);
3913 dest_mem = get_memory_rtx (dest, len);
3915 if (TREE_CODE (val) != INTEGER_CST)
3917 rtx val_rtx;
3919 val_rtx = expand_normal (val);
3920 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3921 val_rtx, 0);
3923 /* Assume that we can memset by pieces if we can store
3924 the coefficients by pieces (in the required modes).
3925 We can't pass builtin_memset_gen_str as that emits RTL. */
3926 c = 1;
3927 if (host_integerp (len, 1)
3928 && can_store_by_pieces (tree_low_cst (len, 1),
3929 builtin_memset_read_str, &c, dest_align,
3930 true))
3932 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3933 val_rtx);
3934 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3935 builtin_memset_gen_str, val_rtx, dest_align,
3936 true, 0);
3938 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3939 dest_align, expected_align,
3940 expected_size))
3941 goto do_libcall;
3943 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3944 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3945 return dest_mem;
3948 if (target_char_cast (val, &c))
3949 goto do_libcall;
3951 if (c)
3953 if (host_integerp (len, 1)
3954 && can_store_by_pieces (tree_low_cst (len, 1),
3955 builtin_memset_read_str, &c, dest_align,
3956 true))
3957 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3958 builtin_memset_read_str, &c, dest_align, true, 0);
3959 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3960 dest_align, expected_align,
3961 expected_size))
3962 goto do_libcall;
3964 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3965 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3966 return dest_mem;
3969 set_mem_align (dest_mem, dest_align);
3970 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3971 CALL_EXPR_TAILCALL (orig_exp)
3972 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3973 expected_align, expected_size);
3975 if (dest_addr == 0)
3977 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3978 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3981 return dest_addr;
3983 do_libcall:
3984 fndecl = get_callee_fndecl (orig_exp);
3985 fcode = DECL_FUNCTION_CODE (fndecl);
3986 if (fcode == BUILT_IN_MEMSET)
3987 fn = build_call_expr (fndecl, 3, dest, val, len);
3988 else if (fcode == BUILT_IN_BZERO)
3989 fn = build_call_expr (fndecl, 2, dest, len);
3990 else
3991 gcc_unreachable ();
3992 if (TREE_CODE (fn) == CALL_EXPR)
3993 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3994 return expand_call (fn, target, target == const0_rtx);
3997 /* Expand expression EXP, which is a call to the bzero builtin. Return
3998 NULL_RTX if we failed; the caller should emit a normal call. */
4000 static rtx
4001 expand_builtin_bzero (tree exp)
4003 tree dest, size;
4005 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4006 return NULL_RTX;
4008 dest = CALL_EXPR_ARG (exp, 0);
4009 size = CALL_EXPR_ARG (exp, 1);
4011 /* New argument list transforming bzero(ptr x, int y) to
4012 memset(ptr x, int 0, size_t y). This is done this way
4013 so that if it isn't expanded inline, we fall back to
4014 calling bzero instead of memset. */
4016 return expand_builtin_memset_args (dest, integer_zero_node,
4017 fold_convert (sizetype, size),
4018 const0_rtx, VOIDmode, exp);
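/* Illustrative sketch, not part of builtins.c: the rewrite performed above;
   the size argument is converted to size_t just as fold_convert (sizetype,
   size) does.  */
#include <string.h>

void
bzero_model (void *dest, size_t n)
{
  memset (dest, 0, n);
}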
4021 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed; the
4022 caller should emit a normal call, otherwise try to get the result
4023 in TARGET, if convenient (and in mode MODE if that's convenient). */
4025 static rtx
4026 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4028 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4029 INTEGER_TYPE, VOID_TYPE))
4031 tree type = TREE_TYPE (exp);
4032 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4033 CALL_EXPR_ARG (exp, 1),
4034 CALL_EXPR_ARG (exp, 2), type);
4035 if (result)
4036 return expand_expr (result, target, mode, EXPAND_NORMAL);
4038 return NULL_RTX;
4041 /* Expand expression EXP, which is a call to the memcmp built-in function.
4042 Return NULL_RTX if we failed and the
4043 caller should emit a normal call, otherwise try to get the result in
4044 TARGET, if convenient (and in mode MODE, if that's convenient). */
4046 static rtx
4047 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4049 if (!validate_arglist (exp,
4050 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4051 return NULL_RTX;
4052 else
4054 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4055 CALL_EXPR_ARG (exp, 1),
4056 CALL_EXPR_ARG (exp, 2));
4057 if (result)
4058 return expand_expr (result, target, mode, EXPAND_NORMAL);
4061 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4063 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4064 rtx result;
4065 rtx insn;
4066 tree arg1 = CALL_EXPR_ARG (exp, 0);
4067 tree arg2 = CALL_EXPR_ARG (exp, 1);
4068 tree len = CALL_EXPR_ARG (exp, 2);
4070 int arg1_align
4071 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4072 int arg2_align
4073 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4074 enum machine_mode insn_mode;
4076 #ifdef HAVE_cmpmemsi
4077 if (HAVE_cmpmemsi)
4078 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4079 else
4080 #endif
4081 #ifdef HAVE_cmpstrnsi
4082 if (HAVE_cmpstrnsi)
4083 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4084 else
4085 #endif
4086 return NULL_RTX;
4088 /* If we don't have POINTER_TYPE, call the function. */
4089 if (arg1_align == 0 || arg2_align == 0)
4090 return NULL_RTX;
4092 /* Make a place to write the result of the instruction. */
4093 result = target;
4094 if (! (result != 0
4095 && REG_P (result) && GET_MODE (result) == insn_mode
4096 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4097 result = gen_reg_rtx (insn_mode);
4099 arg1_rtx = get_memory_rtx (arg1, len);
4100 arg2_rtx = get_memory_rtx (arg2, len);
4101 arg3_rtx = expand_normal (len);
4103 /* Set MEM_SIZE as appropriate. */
4104 if (GET_CODE (arg3_rtx) == CONST_INT)
4106 set_mem_size (arg1_rtx, arg3_rtx);
4107 set_mem_size (arg2_rtx, arg3_rtx);
4110 #ifdef HAVE_cmpmemsi
4111 if (HAVE_cmpmemsi)
4112 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4113 GEN_INT (MIN (arg1_align, arg2_align)));
4114 else
4115 #endif
4116 #ifdef HAVE_cmpstrnsi
4117 if (HAVE_cmpstrnsi)
4118 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4119 GEN_INT (MIN (arg1_align, arg2_align)));
4120 else
4121 #endif
4122 gcc_unreachable ();
4124 if (insn)
4125 emit_insn (insn);
4126 else
4127 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4128 TYPE_MODE (integer_type_node), 3,
4129 XEXP (arg1_rtx, 0), Pmode,
4130 XEXP (arg2_rtx, 0), Pmode,
4131 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4132 TYPE_UNSIGNED (sizetype)),
4133 TYPE_MODE (sizetype));
4135 /* Return the value in the proper mode for this function. */
4136 mode = TYPE_MODE (TREE_TYPE (exp));
4137 if (GET_MODE (result) == mode)
4138 return result;
4139 else if (target != 0)
4141 convert_move (target, result, 0);
4142 return target;
4144 else
4145 return convert_to_mode (mode, result, 0);
4147 #endif
4149 return NULL_RTX;
4152 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4153 if we failed; the caller should emit a normal call, otherwise try to get
4154 the result in TARGET, if convenient. */
4156 static rtx
4157 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4159 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4160 return NULL_RTX;
4161 else
4163 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4164 CALL_EXPR_ARG (exp, 1));
4165 if (result)
4166 return expand_expr (result, target, mode, EXPAND_NORMAL);
4169 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4170 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4171 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4173 rtx arg1_rtx, arg2_rtx;
4174 rtx result, insn = NULL_RTX;
4175 tree fndecl, fn;
4176 tree arg1 = CALL_EXPR_ARG (exp, 0);
4177 tree arg2 = CALL_EXPR_ARG (exp, 1);
4179 int arg1_align
4180 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4181 int arg2_align
4182 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4184 /* If we don't have POINTER_TYPE, call the function. */
4185 if (arg1_align == 0 || arg2_align == 0)
4186 return NULL_RTX;
4188 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4189 arg1 = builtin_save_expr (arg1);
4190 arg2 = builtin_save_expr (arg2);
4192 arg1_rtx = get_memory_rtx (arg1, NULL);
4193 arg2_rtx = get_memory_rtx (arg2, NULL);
4195 #ifdef HAVE_cmpstrsi
4196 /* Try to call cmpstrsi. */
4197 if (HAVE_cmpstrsi)
4199 enum machine_mode insn_mode
4200 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4202 /* Make a place to write the result of the instruction. */
4203 result = target;
4204 if (! (result != 0
4205 && REG_P (result) && GET_MODE (result) == insn_mode
4206 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4207 result = gen_reg_rtx (insn_mode);
4209 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4210 GEN_INT (MIN (arg1_align, arg2_align)));
4212 #endif
4213 #ifdef HAVE_cmpstrnsi
4214 /* Try to determine at least one length and call cmpstrnsi. */
4215 if (!insn && HAVE_cmpstrnsi)
4217 tree len;
4218 rtx arg3_rtx;
4220 enum machine_mode insn_mode
4221 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4222 tree len1 = c_strlen (arg1, 1);
4223 tree len2 = c_strlen (arg2, 1);
4225 if (len1)
4226 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4227 if (len2)
4228 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4230 /* If we don't have a constant length for the first, use the length
4231 of the second, if we know it. We don't require a constant for
4232 this case; some cost analysis could be done if both are available
4233 but neither is constant. For now, assume they're equally cheap,
4234 unless one has side effects. If both strings have constant lengths,
4235 use the smaller. */
4237 if (!len1)
4238 len = len2;
4239 else if (!len2)
4240 len = len1;
4241 else if (TREE_SIDE_EFFECTS (len1))
4242 len = len2;
4243 else if (TREE_SIDE_EFFECTS (len2))
4244 len = len1;
4245 else if (TREE_CODE (len1) != INTEGER_CST)
4246 len = len2;
4247 else if (TREE_CODE (len2) != INTEGER_CST)
4248 len = len1;
4249 else if (tree_int_cst_lt (len1, len2))
4250 len = len1;
4251 else
4252 len = len2;
4254 /* If both arguments have side effects, we cannot optimize. */
4255 if (!len || TREE_SIDE_EFFECTS (len))
4256 goto do_libcall;
4258 arg3_rtx = expand_normal (len);
4260 /* Make a place to write the result of the instruction. */
4261 result = target;
4262 if (! (result != 0
4263 && REG_P (result) && GET_MODE (result) == insn_mode
4264 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4265 result = gen_reg_rtx (insn_mode);
4267 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4268 GEN_INT (MIN (arg1_align, arg2_align)));
4270 #endif
4272 if (insn)
4274 emit_insn (insn);
4276 /* Return the value in the proper mode for this function. */
4277 mode = TYPE_MODE (TREE_TYPE (exp));
4278 if (GET_MODE (result) == mode)
4279 return result;
4280 if (target == 0)
4281 return convert_to_mode (mode, result, 0);
4282 convert_move (target, result, 0);
4283 return target;
4286 /* Expand the library call ourselves using a stabilized argument
4287 list to avoid re-evaluating the function's arguments twice. */
4288 #ifdef HAVE_cmpstrnsi
4289 do_libcall:
4290 #endif
4291 fndecl = get_callee_fndecl (exp);
4292 fn = build_call_expr (fndecl, 2, arg1, arg2);
4293 if (TREE_CODE (fn) == CALL_EXPR)
4294 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4295 return expand_call (fn, target, target == const0_rtx);
4297 #endif
4298 return NULL_RTX;
4301 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4302 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4303 the result in TARGET, if convenient. */
4305 static rtx
4306 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4308 if (!validate_arglist (exp,
4309 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4310 return NULL_RTX;
4311 else
4313 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4314 CALL_EXPR_ARG (exp, 1),
4315 CALL_EXPR_ARG (exp, 2));
4316 if (result)
4317 return expand_expr (result, target, mode, EXPAND_NORMAL);
4320 /* If c_strlen can determine an expression for one of the string
4321 lengths, and it doesn't have side effects, then emit cmpstrnsi
4322 using length MIN(strlen(string)+1, arg3). */
4323 #ifdef HAVE_cmpstrnsi
4324 if (HAVE_cmpstrnsi)
4326 tree len, len1, len2;
4327 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4328 rtx result, insn;
4329 tree fndecl, fn;
4330 tree arg1 = CALL_EXPR_ARG (exp, 0);
4331 tree arg2 = CALL_EXPR_ARG (exp, 1);
4332 tree arg3 = CALL_EXPR_ARG (exp, 2);
4334 int arg1_align
4335 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4336 int arg2_align
4337 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4338 enum machine_mode insn_mode
4339 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4341 len1 = c_strlen (arg1, 1);
4342 len2 = c_strlen (arg2, 1);
4344 if (len1)
4345 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4346 if (len2)
4347 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4349 /* If we don't have a constant length for the first, use the length
4350 of the second, if we know it. We don't require a constant for
4351 this case; some cost analysis could be done if both are available
4352 but neither is constant. For now, assume they're equally cheap,
4353 unless one has side effects. If both strings have constant lengths,
4354 use the smaller. */
4356 if (!len1)
4357 len = len2;
4358 else if (!len2)
4359 len = len1;
4360 else if (TREE_SIDE_EFFECTS (len1))
4361 len = len2;
4362 else if (TREE_SIDE_EFFECTS (len2))
4363 len = len1;
4364 else if (TREE_CODE (len1) != INTEGER_CST)
4365 len = len2;
4366 else if (TREE_CODE (len2) != INTEGER_CST)
4367 len = len1;
4368 else if (tree_int_cst_lt (len1, len2))
4369 len = len1;
4370 else
4371 len = len2;
4373 /* If both arguments have side effects, we cannot optimize. */
4374 if (!len || TREE_SIDE_EFFECTS (len))
4375 return NULL_RTX;
4377 /* The actual new length parameter is MIN(len,arg3). */
4378 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4379 fold_convert (TREE_TYPE (len), arg3));
4381 /* If we don't have POINTER_TYPE, call the function. */
4382 if (arg1_align == 0 || arg2_align == 0)
4383 return NULL_RTX;
4385 /* Make a place to write the result of the instruction. */
4386 result = target;
4387 if (! (result != 0
4388 && REG_P (result) && GET_MODE (result) == insn_mode
4389 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4390 result = gen_reg_rtx (insn_mode);
4392 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4393 arg1 = builtin_save_expr (arg1);
4394 arg2 = builtin_save_expr (arg2);
4395 len = builtin_save_expr (len);
4397 arg1_rtx = get_memory_rtx (arg1, len);
4398 arg2_rtx = get_memory_rtx (arg2, len);
4399 arg3_rtx = expand_normal (len);
4400 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4401 GEN_INT (MIN (arg1_align, arg2_align)));
4402 if (insn)
4404 emit_insn (insn);
4406 /* Return the value in the proper mode for this function. */
4407 mode = TYPE_MODE (TREE_TYPE (exp));
4408 if (GET_MODE (result) == mode)
4409 return result;
4410 if (target == 0)
4411 return convert_to_mode (mode, result, 0);
4412 convert_move (target, result, 0);
4413 return target;
4416 /* Expand the library call ourselves using a stabilized argument
4417 list to avoid re-evaluating the function's arguments twice. */
4418 fndecl = get_callee_fndecl (exp);
4419 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4420 if (TREE_CODE (fn) == CALL_EXPR)
4421 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4422 return expand_call (fn, target, target == const0_rtx);
4424 #endif
4425 return NULL_RTX;
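/* Illustrative sketch, not part of builtins.c: the count handed to
   cmpstrnsi above.  If one string's length is a compile-time constant, the
   comparison never needs to look past its terminating NUL, so the count is
   clamped to MIN (strlen (known) + 1, n); the strcmp expander uses the same
   strlen + 1 trick without the extra clamp.  */
#include <string.h>

size_t
strncmp_effective_length_model (const char *known_str, size_t n)
{
  size_t len = strlen (known_str) + 1;   /* include the NUL */
  return len < n ? len : n;              /* MIN (strlen + 1, n) */
}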
4428 /* Expand expression EXP, which is a call to the strcat builtin.
4429 Return NULL_RTX if we failed; the caller should emit a normal call,
4430 otherwise try to get the result in TARGET, if convenient. */
4432 static rtx
4433 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4435 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4436 return NULL_RTX;
4437 else
4439 tree dst = CALL_EXPR_ARG (exp, 0);
4440 tree src = CALL_EXPR_ARG (exp, 1);
4441 const char *p = c_getstr (src);
4443 /* If the string length is zero, return the dst parameter. */
4444 if (p && *p == '\0')
4445 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4447 if (!optimize_size)
4449 /* See if we can store by pieces into (dst + strlen(dst)). */
4450 tree newsrc, newdst,
4451 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4452 rtx insns;
4454 /* Stabilize the argument list. */
4455 newsrc = builtin_save_expr (src);
4456 dst = builtin_save_expr (dst);
4458 start_sequence ();
4460 /* Create strlen (dst). */
4461 newdst = build_call_expr (strlen_fn, 1, dst);
4462 /* Create (dst p+ strlen (dst)). */
4464 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4465 newdst = builtin_save_expr (newdst);
4467 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4469 end_sequence (); /* Stop sequence. */
4470 return NULL_RTX;
4473 /* Output the entire sequence. */
4474 insns = get_insns ();
4475 end_sequence ();
4476 emit_insn (insns);
4478 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4481 return NULL_RTX;
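/* Illustrative sketch, not part of builtins.c: the rewrite attempted above
   when not optimizing for size.  strcat (dst, src) becomes a strcpy into
   dst + strlen (dst), and the original dst is what gets returned.  */
#include <string.h>

char *
strcat_model (char *dst, const char *src)
{
  strcpy (dst + strlen (dst), src);   /* store into (dst p+ strlen (dst)) */
  return dst;
}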
4485 /* Expand expression EXP, which is a call to the strncat builtin.
4486 Return NULL_RTX if we failed; the caller should emit a normal call,
4487 otherwise try to get the result in TARGET, if convenient. */
4489 static rtx
4490 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4492 if (validate_arglist (exp,
4493 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4495 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4496 CALL_EXPR_ARG (exp, 1),
4497 CALL_EXPR_ARG (exp, 2));
4498 if (result)
4499 return expand_expr (result, target, mode, EXPAND_NORMAL);
4501 return NULL_RTX;
4504 /* Expand expression EXP, which is a call to the strspn builtin.
4505 Return NULL_RTX if we failed; the caller should emit a normal call,
4506 otherwise try to get the result in TARGET, if convenient. */
4508 static rtx
4509 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4511 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4513 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4514 CALL_EXPR_ARG (exp, 1));
4515 if (result)
4516 return expand_expr (result, target, mode, EXPAND_NORMAL);
4518 return NULL_RTX;
4521 /* Expand expression EXP, which is a call to the strcspn builtin.
4522 Return NULL_RTX if we failed; the caller should emit a normal call,
4523 otherwise try to get the result in TARGET, if convenient. */
4525 static rtx
4526 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4528 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4530 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4531 CALL_EXPR_ARG (exp, 1));
4532 if (result)
4533 return expand_expr (result, target, mode, EXPAND_NORMAL);
4535 return NULL_RTX;
4538 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4539 if that's convenient. */
4541 rtx
4542 expand_builtin_saveregs (void)
4544 rtx val, seq;
4546 /* Don't do __builtin_saveregs more than once in a function.
4547 Save the result of the first call and reuse it. */
4548 if (saveregs_value != 0)
4549 return saveregs_value;
4551 /* When this function is called, it means that registers must be
4552 saved on entry to this function. So we migrate the call to the
4553 first insn of this function. */
4555 start_sequence ();
4557 /* Do whatever the machine needs done in this case. */
4558 val = targetm.calls.expand_builtin_saveregs ();
4560 seq = get_insns ();
4561 end_sequence ();
4563 saveregs_value = val;
4565 /* Put the insns after the NOTE that starts the function. If this
4566 is inside a start_sequence, make the outer-level insn chain current, so
4567 the code is placed at the start of the function. */
4568 push_topmost_sequence ();
4569 emit_insn_after (seq, entry_of_function ());
4570 pop_topmost_sequence ();
4572 return val;
4575 /* __builtin_args_info (N) returns word N of the arg space info
4576 for the current function. The number and meanings of words
4577 is controlled by the definition of CUMULATIVE_ARGS. */
4579 static rtx
4580 expand_builtin_args_info (tree exp)
4582 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4583 int *word_ptr = (int *) &crtl->args.info;
4585 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4587 if (call_expr_nargs (exp) != 0)
4589 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4590 error ("argument of %<__builtin_args_info%> must be constant");
4591 else
4593 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4595 if (wordnum < 0 || wordnum >= nwords)
4596 error ("argument of %<__builtin_args_info%> out of range");
4597 else
4598 return GEN_INT (word_ptr[wordnum]);
4601 else
4602 error ("missing argument in %<__builtin_args_info%>");
4604 return const0_rtx;
4607 /* Expand a call to __builtin_next_arg. */
4609 static rtx
4610 expand_builtin_next_arg (void)
4612 /* Checking arguments is already done in fold_builtin_next_arg
4613 that must be called before this function. */
4614 return expand_binop (ptr_mode, add_optab,
4615 crtl->args.internal_arg_pointer,
4616 crtl->args.arg_offset_rtx,
4617 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4620 /* Make it easier for the backends by protecting the valist argument
4621 from multiple evaluations. */
4623 static tree
4624 stabilize_va_list (tree valist, int needs_lvalue)
4626 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4628 gcc_assert (vatype != NULL_TREE);
4630 if (TREE_CODE (vatype) == ARRAY_TYPE)
4632 if (TREE_SIDE_EFFECTS (valist))
4633 valist = save_expr (valist);
4635 /* For this case, the backends will be expecting a pointer to
4636 vatype, but it's possible we've actually been given an array
4637 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4638 So fix it. */
4639 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4641 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4642 valist = build_fold_addr_expr_with_type (valist, p1);
4645 else
4647 tree pt;
4649 if (! needs_lvalue)
4651 if (! TREE_SIDE_EFFECTS (valist))
4652 return valist;
4654 pt = build_pointer_type (vatype);
4655 valist = fold_build1 (ADDR_EXPR, pt, valist);
4656 TREE_SIDE_EFFECTS (valist) = 1;
4659 if (TREE_SIDE_EFFECTS (valist))
4660 valist = save_expr (valist);
4661 valist = build_fold_indirect_ref (valist);
4664 return valist;
4667 /* The "standard" definition of va_list is void*. */
4669 tree
4670 std_build_builtin_va_list (void)
4672 return ptr_type_node;
4675 /* The "standard" abi va_list is va_list_type_node. */
4677 tree
4678 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4680 return va_list_type_node;
4683 /* The "standard" type of va_list is va_list_type_node. */
4685 tree
4686 std_canonical_va_list_type (tree type)
4688 tree wtype, htype;
4690 if (INDIRECT_REF_P (type))
4691 type = TREE_TYPE (type);
4692 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4693 type = TREE_TYPE (type);
4695 wtype = va_list_type_node;
4696 htype = type;
4697 if (TREE_CODE (wtype) == ARRAY_TYPE)
4699 /* If va_list is an array type, the argument may have decayed
4700 to a pointer type, e.g. by being passed to another function.
4701 In that case, unwrap both types so that we can compare the
4702 underlying records. */
4703 if (TREE_CODE (htype) == ARRAY_TYPE
4704 || POINTER_TYPE_P (htype))
4706 wtype = TREE_TYPE (wtype);
4707 htype = TREE_TYPE (htype);
4710 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4711 return va_list_type_node;
4713 return NULL_TREE;
4716 /* The "standard" implementation of va_start: just assign `nextarg' to
4717 the variable. */
4719 void
4720 std_expand_builtin_va_start (tree valist, rtx nextarg)
4722 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4723 convert_move (va_r, nextarg, 0);
4726 /* Expand EXP, a call to __builtin_va_start. */
4728 static rtx
4729 expand_builtin_va_start (tree exp)
4731 rtx nextarg;
4732 tree valist;
4734 if (call_expr_nargs (exp) < 2)
4736 error ("too few arguments to function %<va_start%>");
4737 return const0_rtx;
4740 if (fold_builtin_next_arg (exp, true))
4741 return const0_rtx;
4743 nextarg = expand_builtin_next_arg ();
4744 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4746 if (targetm.expand_builtin_va_start)
4747 targetm.expand_builtin_va_start (valist, nextarg);
4748 else
4749 std_expand_builtin_va_start (valist, nextarg);
4751 return const0_rtx;
4754 /* The "standard" implementation of va_arg: read the value from the
4755 current (padded) address and increment by the (padded) size. */
4757 tree
4758 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4760 tree addr, t, type_size, rounded_size, valist_tmp;
4761 unsigned HOST_WIDE_INT align, boundary;
4762 bool indirect;
4764 #ifdef ARGS_GROW_DOWNWARD
4765 /* All of the alignment and movement below is for args-grow-up machines.
4766 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4767 implement their own specialized gimplify_va_arg_expr routines. */
4768 gcc_unreachable ();
4769 #endif
4771 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4772 if (indirect)
4773 type = build_pointer_type (type);
4775 align = PARM_BOUNDARY / BITS_PER_UNIT;
4776 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4778 /* Hoist the valist value into a temporary for the moment. */
4779 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4781 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4782 requires greater alignment, we must perform dynamic alignment. */
4783 if (boundary > align
4784 && !integer_zerop (TYPE_SIZE (type)))
4786 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4787 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4788 valist_tmp, size_int (boundary - 1)));
4789 gimplify_and_add (t, pre_p);
4791 t = fold_convert (sizetype, valist_tmp);
4792 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4793 fold_convert (TREE_TYPE (valist),
4794 fold_build2 (BIT_AND_EXPR, sizetype, t,
4795 size_int (-boundary))));
4796 gimplify_and_add (t, pre_p);
4798 else
4799 boundary = align;
4801 /* If the actual alignment is less than the alignment of the type,
4802 adjust the type accordingly so that we don't assume strict alignment
4803 when dereferencing the pointer. */
4804 boundary *= BITS_PER_UNIT;
4805 if (boundary < TYPE_ALIGN (type))
4807 type = build_variant_type_copy (type);
4808 TYPE_ALIGN (type) = boundary;
4811 /* Compute the rounded size of the type. */
4812 type_size = size_in_bytes (type);
4813 rounded_size = round_up (type_size, align);
4815 /* Reduce rounded_size so it's sharable with the postqueue. */
4816 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4818 /* Get AP. */
4819 addr = valist_tmp;
4820 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4822 /* Small args are padded downward. */
4823 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4824 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4825 size_binop (MINUS_EXPR, rounded_size, type_size));
4826 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4829 /* Compute new value for AP. */
4830 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4831 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4832 gimplify_and_add (t, pre_p);
4834 addr = fold_convert (build_pointer_type (type), addr);
4836 if (indirect)
4837 addr = build_va_arg_indirect_ref (addr);
4839 return build_va_arg_indirect_ref (addr);
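/* Illustrative sketch, not part of builtins.c: the pointer arithmetic that
   std_gimplify_va_arg_expr emits, written out for an args-grow-up target.
   ALIGN and BOUNDARY are byte counts here and are assumed to be powers of
   two; PAD_DOWN stands in for PAD_VARARGS_DOWN.  */
#include <stddef.h>
#include <stdint.h>

void *
std_va_arg_model (char **ap, size_t type_size, size_t align,
                  size_t boundary, int pad_down)
{
  char *p = *ap;
  char *addr;
  size_t rounded_size;

  /* Dynamic alignment, only when the argument needs more than the default
     parameter alignment: p = (p + boundary - 1) & -boundary.  */
  if (boundary > align)
    p = (char *) (((uintptr_t) p + boundary - 1) & ~((uintptr_t) boundary - 1));

  /* Round the argument size up to a multiple of the slot alignment.  */
  rounded_size = (type_size + align - 1) & ~(align - 1);

  /* On pad-down targets, small arguments live at the top of their slot.  */
  addr = p;
  if (pad_down && rounded_size <= align)
    addr += rounded_size - type_size;

  *ap = p + rounded_size;   /* the new value for AP */
  return addr;              /* the caller dereferences this */
}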
4842 /* Build an indirect-ref expression over the given TREE, which represents a
4843 piece of a va_arg() expansion. */
4844 tree
4845 build_va_arg_indirect_ref (tree addr)
4847 addr = build_fold_indirect_ref (addr);
4849 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4850 mf_mark (addr);
4852 return addr;
4855 /* Return a dummy expression of type TYPE in order to keep going after an
4856 error. */
4858 static tree
4859 dummy_object (tree type)
4861 tree t = build_int_cst (build_pointer_type (type), 0);
4862 return build1 (INDIRECT_REF, type, t);
4865 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4866 builtin function, but a very special sort of operator. */
4868 enum gimplify_status
4869 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4871 tree promoted_type, have_va_type;
4872 tree valist = TREE_OPERAND (*expr_p, 0);
4873 tree type = TREE_TYPE (*expr_p);
4874 tree t;
4876 /* Verify that valist is of the proper type. */
4877 have_va_type = TREE_TYPE (valist);
4878 if (have_va_type == error_mark_node)
4879 return GS_ERROR;
4880 have_va_type = targetm.canonical_va_list_type (have_va_type);
4882 if (have_va_type == NULL_TREE)
4884 error ("first argument to %<va_arg%> not of type %<va_list%>");
4885 return GS_ERROR;
4888 /* Generate a diagnostic for requesting data of a type that cannot
4889 be passed through `...' due to type promotion at the call site. */
4890 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4891 != type)
4893 static bool gave_help;
4895 /* Unfortunately, this is merely undefined, rather than a constraint
4896 violation, so we cannot make this an error. If this call is never
4897 executed, the program is still strictly conforming. */
4898 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4899 type, promoted_type);
4900 if (! gave_help)
4902 gave_help = true;
4903 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4904 promoted_type, type);
4907 /* We can, however, treat "undefined" any way we please.
4908 Call abort to encourage the user to fix the program. */
4909 inform ("if this code is reached, the program will abort");
4910 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4911 append_to_statement_list (t, pre_p);
4913 /* This is dead code, but go ahead and finish so that the
4914 mode of the result comes out right. */
4915 *expr_p = dummy_object (type);
4916 return GS_ALL_DONE;
4918 else
4920 /* Make it easier for the backends by protecting the valist argument
4921 from multiple evaluations. */
4922 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4924 /* For this case, the backends will be expecting a pointer to
4925 TREE_TYPE (abi), but it's possible we've
4926 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4927 So fix it. */
4928 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4930 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4931 valist = build_fold_addr_expr_with_type (valist, p1);
4933 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4935 else
4936 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4938 if (!targetm.gimplify_va_arg_expr)
4939 /* FIXME: Once most targets are converted we should merely
4940 assert this is non-null. */
4941 return GS_ALL_DONE;
4943 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4944 return GS_OK;
4948 /* Expand EXP, a call to __builtin_va_end. */
4950 static rtx
4951 expand_builtin_va_end (tree exp)
4953 tree valist = CALL_EXPR_ARG (exp, 0);
4955 /* Evaluate for side effects, if needed. I hate macros that don't
4956 do that. */
4957 if (TREE_SIDE_EFFECTS (valist))
4958 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4960 return const0_rtx;
4963 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4964 builtin rather than just as an assignment in stdarg.h because of the
4965 nastiness of array-type va_list types. */
4967 static rtx
4968 expand_builtin_va_copy (tree exp)
4970 tree dst, src, t;
4972 dst = CALL_EXPR_ARG (exp, 0);
4973 src = CALL_EXPR_ARG (exp, 1);
4975 dst = stabilize_va_list (dst, 1);
4976 src = stabilize_va_list (src, 0);
4978 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4980 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4982 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4983 TREE_SIDE_EFFECTS (t) = 1;
4984 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4986 else
4988 rtx dstb, srcb, size;
4990 /* Evaluate to pointers. */
4991 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4992 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4993 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4994 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4996 dstb = convert_memory_address (Pmode, dstb);
4997 srcb = convert_memory_address (Pmode, srcb);
4999 /* "Dereference" to BLKmode memories. */
5000 dstb = gen_rtx_MEM (BLKmode, dstb);
5001 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5002 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5003 srcb = gen_rtx_MEM (BLKmode, srcb);
5004 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5005 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5007 /* Copy. */
5008 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5011 return const0_rtx;
5014 /* Expand a call to one of the builtin functions __builtin_frame_address or
5015 __builtin_return_address. */
5017 static rtx
5018 expand_builtin_frame_address (tree fndecl, tree exp)
5020 /* The argument must be a nonnegative integer constant.
5021 It counts the number of frames to scan up the stack.
5022 The value is the return address saved in that frame. */
5023 if (call_expr_nargs (exp) == 0)
5024 /* Warning about missing arg was already issued. */
5025 return const0_rtx;
5026 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5028 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5029 error ("invalid argument to %<__builtin_frame_address%>");
5030 else
5031 error ("invalid argument to %<__builtin_return_address%>");
5032 return const0_rtx;
5034 else
5036 rtx tem
5037 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5038 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5040 /* Some ports cannot access arbitrary stack frames. */
5041 if (tem == NULL)
5043 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5044 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5045 else
5046 warning (0, "unsupported argument to %<__builtin_return_address%>");
5047 return const0_rtx;
5050 /* For __builtin_frame_address, return what we've got. */
5051 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5052 return tem;
5054 if (!REG_P (tem)
5055 && ! CONSTANT_P (tem))
5056 tem = copy_to_mode_reg (Pmode, tem);
5057 return tem;
5061 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5062 we failed and the caller should emit a normal call, otherwise try to get
5063 the result in TARGET, if convenient. */
5065 static rtx
5066 expand_builtin_alloca (tree exp, rtx target)
5068 rtx op0;
5069 rtx result;
5071 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5072 should always expand to function calls. These can be intercepted
5073 in libmudflap. */
5074 if (flag_mudflap)
5075 return NULL_RTX;
5077 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5078 return NULL_RTX;
5080 /* Compute the argument. */
5081 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5083 /* Allocate the desired space. */
5084 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5085 result = convert_memory_address (ptr_mode, result);
5087 return result;
5090 /* Expand a call to a bswap builtin with argument ARG0. MODE
5091 is the mode to expand with. */
5093 static rtx
5094 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5096 enum machine_mode mode;
5097 tree arg;
5098 rtx op0;
5100 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5101 return NULL_RTX;
5103 arg = CALL_EXPR_ARG (exp, 0);
5104 mode = TYPE_MODE (TREE_TYPE (arg));
5105 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5107 target = expand_unop (mode, bswap_optab, op0, target, 1);
5109 gcc_assert (target);
5111 return convert_to_mode (mode, target, 0);
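/* Illustrative sketch, not part of builtins.c: the 32-bit operation being
   expanded above, i.e. what __builtin_bswap32 computes when it cannot be
   emitted as a single instruction.  */
#include <stdint.h>

uint32_t
bswap32_model (uint32_t x)
{
  return (x >> 24)
         | ((x >> 8) & 0x0000ff00u)
         | ((x << 8) & 0x00ff0000u)
         | (x << 24);
}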
5114 /* Expand a call to a unary builtin in EXP.
5115 Return NULL_RTX if a normal call should be emitted rather than expanding the
5116 function in-line. If convenient, the result should be placed in TARGET.
5117 SUBTARGET may be used as the target for computing one of EXP's operands. */
5119 static rtx
5120 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5121 rtx subtarget, optab op_optab)
5123 rtx op0;
5125 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5126 return NULL_RTX;
5128 /* Compute the argument. */
5129 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5130 VOIDmode, EXPAND_NORMAL);
5131 /* Compute op, into TARGET if possible.
5132 Set TARGET to wherever the result comes back. */
5133 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5134 op_optab, op0, target, 1);
5135 gcc_assert (target);
5137 return convert_to_mode (target_mode, target, 0);
5140 /* If the string passed to fputs is a constant and is one character
5141 long, we attempt to transform this call into __builtin_fputc(). */
5143 static rtx
5144 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5146 /* Verify the arguments in the original call. */
5147 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5149 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5150 CALL_EXPR_ARG (exp, 1),
5151 (target == const0_rtx),
5152 unlocked, NULL_TREE);
5153 if (result)
5154 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5156 return NULL_RTX;
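/* Illustrative sketch, not part of builtins.c: the transformation described
   in the comment above.  When the string is a one-character constant (and,
   in the real code, the fputs result is unused), the call can be issued as
   a single fputc.  */
#include <stdio.h>

void
fputs_one_char_model (FILE *fp)
{
  /* fputs ("x", fp);  becomes ...  */
  fputc ('x', fp);
}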
5159 /* Expand a call to __builtin_expect. We just return our argument
5160 as the builtin_expect semantics should have already been handled by
5161 the tree branch prediction pass. */
5163 static rtx
5164 expand_builtin_expect (tree exp, rtx target)
5166 tree arg, c;
5168 if (call_expr_nargs (exp) < 2)
5169 return const0_rtx;
5170 arg = CALL_EXPR_ARG (exp, 0);
5171 c = CALL_EXPR_ARG (exp, 1);
5173 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5174 /* When guessing was done, the hints should be already stripped away. */
5175 gcc_assert (!flag_guess_branch_prob
5176 || optimize == 0 || errorcount || sorrycount);
5177 return target;
5180 void
5181 expand_builtin_trap (void)
5183 #ifdef HAVE_trap
5184 if (HAVE_trap)
5185 emit_insn (gen_trap ());
5186 else
5187 #endif
5188 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5189 emit_barrier ();
5192 /* Expand EXP, a call to fabs, fabsf or fabsl.
5193 Return NULL_RTX if a normal call should be emitted rather than expanding
5194 the function inline. If convenient, the result should be placed
5195 in TARGET. SUBTARGET may be used as the target for computing
5196 the operand. */
5198 static rtx
5199 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5201 enum machine_mode mode;
5202 tree arg;
5203 rtx op0;
5205 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5206 return NULL_RTX;
5208 arg = CALL_EXPR_ARG (exp, 0);
5209 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5210 mode = TYPE_MODE (TREE_TYPE (arg));
5211 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5212 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5215 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5216 Return NULL if a normal call should be emitted rather than expanding the
5217 function inline. If convenient, the result should be placed in TARGET.
5218 SUBTARGET may be used as the target for computing the operand. */
5220 static rtx
5221 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5223 rtx op0, op1;
5224 tree arg;
5226 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5227 return NULL_RTX;
5229 arg = CALL_EXPR_ARG (exp, 0);
5230 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5232 arg = CALL_EXPR_ARG (exp, 1);
5233 op1 = expand_normal (arg);
5235 return expand_copysign (op0, op1, target);
5238 /* Create a new constant string literal and return a char* pointer to it.
5239 The STRING_CST value is the LEN characters at STR. */
5240 tree
5241 build_string_literal (int len, const char *str)
5243 tree t, elem, index, type;
5245 t = build_string (len, str);
5246 elem = build_type_variant (char_type_node, 1, 0);
5247 index = build_index_type (size_int (len - 1));
5248 type = build_array_type (elem, index);
5249 TREE_TYPE (t) = type;
5250 TREE_CONSTANT (t) = 1;
5251 TREE_READONLY (t) = 1;
5252 TREE_STATIC (t) = 1;
5254 type = build_pointer_type (elem);
5255 t = build1 (ADDR_EXPR, type,
5256 build4 (ARRAY_REF, elem,
5257 t, integer_zero_node, NULL_TREE, NULL_TREE));
5258 return t;
5261 /* Expand EXP, a call to printf or printf_unlocked.
5262 Return NULL_RTX if a normal call should be emitted rather than transforming
5263 the function inline. If convenient, the result should be placed in
5264 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5265 call. */
5266 static rtx
5267 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5268 bool unlocked)
5270 /* If we're using an unlocked function, assume the other unlocked
5271 functions exist explicitly. */
5272 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5273 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5274 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5275 : implicit_built_in_decls[BUILT_IN_PUTS];
5276 const char *fmt_str;
5277 tree fn = 0;
5278 tree fmt, arg;
5279 int nargs = call_expr_nargs (exp);
5281 /* If the return value is used, don't do the transformation. */
5282 if (target != const0_rtx)
5283 return NULL_RTX;
5285 /* Verify the required arguments in the original call. */
5286 if (nargs == 0)
5287 return NULL_RTX;
5288 fmt = CALL_EXPR_ARG (exp, 0);
5289 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5290 return NULL_RTX;
5292 /* Check whether the format is a literal string constant. */
5293 fmt_str = c_getstr (fmt);
5294 if (fmt_str == NULL)
5295 return NULL_RTX;
5297 if (!init_target_chars ())
5298 return NULL_RTX;
5300 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5301 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5303 if ((nargs != 2)
5304 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5305 return NULL_RTX;
5306 if (fn_puts)
5307 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5309 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5310 else if (strcmp (fmt_str, target_percent_c) == 0)
5312 if ((nargs != 2)
5313 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5314 return NULL_RTX;
5315 if (fn_putchar)
5316 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5318 else
5320 /* We can't handle anything else with % args or %% ... yet. */
5321 if (strchr (fmt_str, target_percent))
5322 return NULL_RTX;
5324 if (nargs > 1)
5325 return NULL_RTX;
5327 /* If the format specifier was "", printf does nothing. */
5328 if (fmt_str[0] == '\0')
5329 return const0_rtx;
5330 /* If the format specifier has length of 1, call putchar. */
5331 if (fmt_str[1] == '\0')
5333 /* Given printf("c"), (where c is any one character,)
5334 convert "c"[0] to an int and pass that to the replacement
5335 function. */
5336 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5337 if (fn_putchar)
5338 fn = build_call_expr (fn_putchar, 1, arg);
5340 else
5342 /* If the format specifier was "string\n", call puts("string"). */
5343 size_t len = strlen (fmt_str);
5344 if ((unsigned char)fmt_str[len - 1] == target_newline)
5346 /* Create a NUL-terminated string that's one char shorter
5347 than the original, stripping off the trailing '\n'. */
5348 char *newstr = XALLOCAVEC (char, len);
5349 memcpy (newstr, fmt_str, len - 1);
5350 newstr[len - 1] = 0;
5351 arg = build_string_literal (len, newstr);
5352 if (fn_puts)
5353 fn = build_call_expr (fn_puts, 1, arg);
5355 else
5356 /* We'd like to arrange to call fputs(string,stdout) here,
5357 but we need stdout and don't have a way to get it yet. */
5358 return NULL_RTX;
5362 if (!fn)
5363 return NULL_RTX;
5364 if (TREE_CODE (fn) == CALL_EXPR)
5365 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5366 return expand_expr (fn, target, mode, EXPAND_NORMAL);
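/* Illustrative examples (not part of the original source) of the
   transformations performed above, all assuming the result of printf
   is not used:

       printf ("%s\n", s);   becomes   puts (s);
       printf ("%c", c);     becomes   putchar (c);
       printf ("x");         becomes   putchar ('x');
       printf ("hello\n");   becomes   puts ("hello");
       printf ("");          is removed entirely.  */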
5369 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5370 Return NULL_RTX if a normal call should be emitted rather than transforming
5371 the function inline. If convenient, the result should be placed in
5372 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5373 call. */
5374 static rtx
5375 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5376 bool unlocked)
5378 /* If we're using an unlocked function, assume the other unlocked
5379 functions exist explicitly. */
5380 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5381 : implicit_built_in_decls[BUILT_IN_FPUTC];
5382 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5383 : implicit_built_in_decls[BUILT_IN_FPUTS];
5384 const char *fmt_str;
5385 tree fn = 0;
5386 tree fmt, fp, arg;
5387 int nargs = call_expr_nargs (exp);
5389 /* If the return value is used, don't do the transformation. */
5390 if (target != const0_rtx)
5391 return NULL_RTX;
5393 /* Verify the required arguments in the original call. */
5394 if (nargs < 2)
5395 return NULL_RTX;
5396 fp = CALL_EXPR_ARG (exp, 0);
5397 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5398 return NULL_RTX;
5399 fmt = CALL_EXPR_ARG (exp, 1);
5400 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5401 return NULL_RTX;
5403 /* Check whether the format is a literal string constant. */
5404 fmt_str = c_getstr (fmt);
5405 if (fmt_str == NULL)
5406 return NULL_RTX;
5408 if (!init_target_chars ())
5409 return NULL_RTX;
5411 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5412 if (strcmp (fmt_str, target_percent_s) == 0)
5414 if ((nargs != 3)
5415 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5416 return NULL_RTX;
5417 arg = CALL_EXPR_ARG (exp, 2);
5418 if (fn_fputs)
5419 fn = build_call_expr (fn_fputs, 2, arg, fp);
5421 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5422 else if (strcmp (fmt_str, target_percent_c) == 0)
5424 if ((nargs != 3)
5425 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5426 return NULL_RTX;
5427 arg = CALL_EXPR_ARG (exp, 2);
5428 if (fn_fputc)
5429 fn = build_call_expr (fn_fputc, 2, arg, fp);
5431 else
5433 /* We can't handle anything else with % args or %% ... yet. */
5434 if (strchr (fmt_str, target_percent))
5435 return NULL_RTX;
5437 if (nargs > 2)
5438 return NULL_RTX;
5440 /* If the format specifier was "", fprintf does nothing. */
5441 if (fmt_str[0] == '\0')
5443 /* Evaluate and ignore FILE* argument for side-effects. */
5444 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5445 return const0_rtx;
5448 /* When "string" doesn't contain %, replace all cases of
5449 fprintf(stream,string) with fputs(string,stream). The fputs
5450 builtin will take care of special cases like length == 1. */
5451 if (fn_fputs)
5452 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5455 if (!fn)
5456 return NULL_RTX;
5457 if (TREE_CODE (fn) == CALL_EXPR)
5458 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5459 return expand_expr (fn, target, mode, EXPAND_NORMAL);
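/* Illustrative examples (not part of the original source) of the
   transformations performed above, all assuming the result of fprintf
   is not used:

       fprintf (fp, "%s", s);   becomes   fputs (s, fp);
       fprintf (fp, "%c", c);   becomes   fputc (c, fp);
       fprintf (fp, "hello");   becomes   fputs ("hello", fp);
       fprintf (fp, "");        only evaluates FP for side effects.  */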
5462 /* Expand a call EXP to sprintf. Return NULL_RTX if
5463 a normal call should be emitted rather than expanding the function
5464 inline. If convenient, the result should be placed in TARGET with
5465 mode MODE. */
5467 static rtx
5468 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5470 tree dest, fmt;
5471 const char *fmt_str;
5472 int nargs = call_expr_nargs (exp);
5474 /* Verify the required arguments in the original call. */
5475 if (nargs < 2)
5476 return NULL_RTX;
5477 dest = CALL_EXPR_ARG (exp, 0);
5478 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5479 return NULL_RTX;
5480 fmt = CALL_EXPR_ARG (exp, 1);
5481 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5482 return NULL_RTX;
5484 /* Check whether the format is a literal string constant. */
5485 fmt_str = c_getstr (fmt);
5486 if (fmt_str == NULL)
5487 return NULL_RTX;
5489 if (!init_target_chars ())
5490 return NULL_RTX;
5492 /* If the format doesn't contain % args or %%, use strcpy. */
5493 if (strchr (fmt_str, target_percent) == 0)
5495 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5496 tree exp;
5498 if ((nargs > 2) || ! fn)
5499 return NULL_RTX;
5500 expand_expr (build_call_expr (fn, 2, dest, fmt),
5501 const0_rtx, VOIDmode, EXPAND_NORMAL);
5502 if (target == const0_rtx)
5503 return const0_rtx;
5504 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5505 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5507 /* If the format is "%s", use strcpy if the result isn't used. */
5508 else if (strcmp (fmt_str, target_percent_s) == 0)
5510 tree fn, arg, len;
5511 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5513 if (! fn)
5514 return NULL_RTX;
5515 if (nargs != 3)
5516 return NULL_RTX;
5517 arg = CALL_EXPR_ARG (exp, 2);
5518 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5519 return NULL_RTX;
5521 if (target != const0_rtx)
5523 len = c_strlen (arg, 1);
5524 if (! len || TREE_CODE (len) != INTEGER_CST)
5525 return NULL_RTX;
5527 else
5528 len = NULL_TREE;
5530 expand_expr (build_call_expr (fn, 2, dest, arg),
5531 const0_rtx, VOIDmode, EXPAND_NORMAL);
5533 if (target == const0_rtx)
5534 return const0_rtx;
5535 return expand_expr (len, target, mode, EXPAND_NORMAL);
5538 return NULL_RTX;
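/* Illustrative examples (not part of the original source) of the
   transformations performed above:

       sprintf (buf, "hello");   becomes   strcpy (buf, "hello"),
                                 with the constant 5 as the result;
       sprintf (buf, "%s", s);   becomes   strcpy (buf, s), with
                                 strlen (s) as the result when that
                                 length is a compile-time constant
                                 (otherwise only the ignored-result
                                 form is transformed).  */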
5541 /* Expand a call to either the entry or exit function profiler. */
5543 static rtx
5544 expand_builtin_profile_func (bool exitp)
5546 rtx this, which;
5548 this = DECL_RTL (current_function_decl);
5549 gcc_assert (MEM_P (this));
5550 this = XEXP (this, 0);
5552 if (exitp)
5553 which = profile_function_exit_libfunc;
5554 else
5555 which = profile_function_entry_libfunc;
5557 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5558 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5559 0),
5560 Pmode);
5562 return const0_rtx;
5565 /* Expand a call to __builtin___clear_cache. */
5567 static rtx
5568 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5570 #ifndef HAVE_clear_cache
5571 #ifdef CLEAR_INSN_CACHE
5572 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5573 does something. Just do the default expansion to a call to
5574 __clear_cache(). */
5575 return NULL_RTX;
5576 #else
5577 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5578 does nothing. There is no need to call it. Do nothing. */
5579 return const0_rtx;
5580 #endif /* CLEAR_INSN_CACHE */
5581 #else
5582 /* We have a "clear_cache" insn, and it will handle everything. */
5583 tree begin, end;
5584 rtx begin_rtx, end_rtx;
5585 enum insn_code icode;
5587 /* We must not expand to a library call. If we did, any
5588 fallback library function in libgcc that might contain a call to
5589 __builtin___clear_cache() would recurse infinitely. */
5590 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5592 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5593 return const0_rtx;
5596 if (HAVE_clear_cache)
5598 icode = CODE_FOR_clear_cache;
5600 begin = CALL_EXPR_ARG (exp, 0);
5601 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5602 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5603 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5604 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5606 end = CALL_EXPR_ARG (exp, 1);
5607 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5608 end_rtx = convert_memory_address (Pmode, end_rtx);
5609 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5610 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5612 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5614 return const0_rtx;
5615 #endif /* HAVE_clear_cache */
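/* Illustrative example (not part of the original source; the buffer
   names are made up): a JIT-style user flushes the instruction cache
   after writing freshly generated code, e.g.

       memcpy (code_buf, insns, size);
       __builtin___clear_cache (code_buf, code_buf + size);

   On targets with a "clear_cache" insn the call expands inline as
   above; otherwise it either becomes a call to __clear_cache in libgcc
   or disappears entirely when the target needs no flush.  */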
5618 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5620 static rtx
5621 round_trampoline_addr (rtx tramp)
5623 rtx temp, addend, mask;
5625 /* If we don't need too much alignment, we'll have been guaranteed
5626 proper alignment by get_trampoline_type. */
5627 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5628 return tramp;
5630 /* Round address up to desired boundary. */
5631 temp = gen_reg_rtx (Pmode);
5632 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5633 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5635 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5636 temp, 0, OPTAB_LIB_WIDEN);
5637 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5638 temp, 0, OPTAB_LIB_WIDEN);
5640 return tramp;
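/* Worked example (not part of the original source): with
   TRAMPOLINE_ALIGNMENT of 64 bits the computation above is

       tramp = (tramp + 7) & -8;

   i.e. add (bytes - 1) and mask with -bytes, which rounds the address
   up to the next 8-byte boundary.  */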
5643 static rtx
5644 expand_builtin_init_trampoline (tree exp)
5646 tree t_tramp, t_func, t_chain;
5647 rtx r_tramp, r_func, r_chain;
5648 #ifdef TRAMPOLINE_TEMPLATE
5649 rtx blktramp;
5650 #endif
5652 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5653 POINTER_TYPE, VOID_TYPE))
5654 return NULL_RTX;
5656 t_tramp = CALL_EXPR_ARG (exp, 0);
5657 t_func = CALL_EXPR_ARG (exp, 1);
5658 t_chain = CALL_EXPR_ARG (exp, 2);
5660 r_tramp = expand_normal (t_tramp);
5661 r_func = expand_normal (t_func);
5662 r_chain = expand_normal (t_chain);
5664 /* Generate insns to initialize the trampoline. */
5665 r_tramp = round_trampoline_addr (r_tramp);
5666 #ifdef TRAMPOLINE_TEMPLATE
5667 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5668 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5669 emit_block_move (blktramp, assemble_trampoline_template (),
5670 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5671 #endif
5672 trampolines_created = 1;
5673 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5675 return const0_rtx;
5678 static rtx
5679 expand_builtin_adjust_trampoline (tree exp)
5681 rtx tramp;
5683 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5684 return NULL_RTX;
5686 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5687 tramp = round_trampoline_addr (tramp);
5688 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5689 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5690 #endif
5692 return tramp;
5695 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5696 function. The function first checks whether the back end provides
5697 an insn to implement signbit for the respective mode. If not, it
5698 checks whether the floating point format of the value is such that
5699 the sign bit can be extracted. If that is not the case, the
5700 function returns NULL_RTX to indicate that a normal call should be
5701 emitted rather than expanding the function in-line. EXP is the
5702 expression that is a call to the builtin function; if convenient,
5703 the result should be placed in TARGET. */
5704 static rtx
5705 expand_builtin_signbit (tree exp, rtx target)
5707 const struct real_format *fmt;
5708 enum machine_mode fmode, imode, rmode;
5709 HOST_WIDE_INT hi, lo;
5710 tree arg;
5711 int word, bitpos;
5712 enum insn_code icode;
5713 rtx temp;
5715 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5716 return NULL_RTX;
5718 arg = CALL_EXPR_ARG (exp, 0);
5719 fmode = TYPE_MODE (TREE_TYPE (arg));
5720 rmode = TYPE_MODE (TREE_TYPE (exp));
5721 fmt = REAL_MODE_FORMAT (fmode);
5723 arg = builtin_save_expr (arg);
5725 /* Expand the argument yielding a RTX expression. */
5726 temp = expand_normal (arg);
5728 /* Check if the back end provides an insn that handles signbit for the
5729 argument's mode. */
5730 icode = signbit_optab->handlers [(int) fmode].insn_code;
5731 if (icode != CODE_FOR_nothing)
5733 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5734 emit_unop_insn (icode, target, temp, UNKNOWN);
5735 return target;
5738 /* For floating point formats without a sign bit, implement signbit
5739 as "ARG < 0.0". */
5740 bitpos = fmt->signbit_ro;
5741 if (bitpos < 0)
5743 /* But we can't do this if the format supports signed zero. */
5744 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5745 return NULL_RTX;
5747 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5748 build_real (TREE_TYPE (arg), dconst0));
5749 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5752 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5754 imode = int_mode_for_mode (fmode);
5755 if (imode == BLKmode)
5756 return NULL_RTX;
5757 temp = gen_lowpart (imode, temp);
5759 else
5761 imode = word_mode;
5762 /* Handle targets with different FP word orders. */
5763 if (FLOAT_WORDS_BIG_ENDIAN)
5764 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5765 else
5766 word = bitpos / BITS_PER_WORD;
5767 temp = operand_subword_force (temp, word, fmode);
5768 bitpos = bitpos % BITS_PER_WORD;
5771 /* Force the intermediate word_mode (or narrower) result into a
5772 register. This avoids attempting to create paradoxical SUBREGs
5773 of floating point modes below. */
5774 temp = force_reg (imode, temp);
5776 /* If the bitpos is within the "result mode" lowpart, the operation
5777 can be implemented with a single bitwise AND. Otherwise, we need
5778 a right shift and an AND. */
5780 if (bitpos < GET_MODE_BITSIZE (rmode))
5782 if (bitpos < HOST_BITS_PER_WIDE_INT)
5784 hi = 0;
5785 lo = (HOST_WIDE_INT) 1 << bitpos;
5787 else
5789 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5790 lo = 0;
5793 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5794 temp = gen_lowpart (rmode, temp);
5795 temp = expand_binop (rmode, and_optab, temp,
5796 immed_double_const (lo, hi, rmode),
5797 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5799 else
5801 /* Perform a logical right shift to place the signbit in the least
5802 significant bit, then truncate the result to the desired mode
5803 and mask just this bit. */
5804 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5805 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5806 temp = gen_lowpart (rmode, temp);
5807 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5808 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5811 return temp;
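/* Worked example (not part of the original source): for IEEE double the
   sign is bit 63.  With a 32-bit "int" result mode that bit lies outside
   the lowpart, so the shift-and-AND path above is used: the value is
   reinterpreted as an integer, shifted right logically by 63 and masked
   with 1.  For IEEE single the sign is bit 31, which fits in the result
   mode, so a single AND with 0x80000000 suffices.  */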
5814 /* Expand fork or exec calls. TARGET is the desired target of the
5815 call. EXP is the call. FN is the
5816 identifier of the actual function. IGNORE is nonzero if the
5817 value is to be ignored. */
5819 static rtx
5820 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5822 tree id, decl;
5823 tree call;
5825 /* If we are not profiling, just call the function. */
5826 if (!profile_arc_flag)
5827 return NULL_RTX;
5829 /* Otherwise call the wrapper. This should be equivalent for the rest of
5830 the compiler, so the code does not diverge, and the wrapper may run the
5831 code necessary for keeping the profiling sane. */
5833 switch (DECL_FUNCTION_CODE (fn))
5835 case BUILT_IN_FORK:
5836 id = get_identifier ("__gcov_fork");
5837 break;
5839 case BUILT_IN_EXECL:
5840 id = get_identifier ("__gcov_execl");
5841 break;
5843 case BUILT_IN_EXECV:
5844 id = get_identifier ("__gcov_execv");
5845 break;
5847 case BUILT_IN_EXECLP:
5848 id = get_identifier ("__gcov_execlp");
5849 break;
5851 case BUILT_IN_EXECLE:
5852 id = get_identifier ("__gcov_execle");
5853 break;
5855 case BUILT_IN_EXECVP:
5856 id = get_identifier ("__gcov_execvp");
5857 break;
5859 case BUILT_IN_EXECVE:
5860 id = get_identifier ("__gcov_execve");
5861 break;
5863 default:
5864 gcc_unreachable ();
5867 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5868 DECL_EXTERNAL (decl) = 1;
5869 TREE_PUBLIC (decl) = 1;
5870 DECL_ARTIFICIAL (decl) = 1;
5871 TREE_NOTHROW (decl) = 1;
5872 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5873 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5874 call = rewrite_call_expr (exp, 0, decl, 0);
5875 return expand_call (call, target, ignore);
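/* Illustrative example (not part of the original source): when compiling
   with -fprofile-arcs, a call such as

       pid_t pid = fork ();

   is redirected to __gcov_fork (), a libgcov wrapper with the same
   signature that flushes the profile counters before performing the
   real fork, so parent and child keep consistent arc counts.  */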
5880 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5881 the pointer in these functions is void*, the tree optimizers may remove
5882 casts. The mode computed in expand_builtin isn't reliable either, due
5883 to __sync_bool_compare_and_swap.
5885 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5886 group of builtins. This gives us log2 of the mode size. */
5888 static inline enum machine_mode
5889 get_builtin_sync_mode (int fcode_diff)
5891 /* The size is not negotiable, so ask not to get BLKmode in return
5892 if the target indicates that a smaller size would be better. */
5893 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
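/* Illustrative mapping (not part of the original source): FCODE_DIFF is
   log2 of the access size in bytes, so 0 -> QImode (8 bits),
   1 -> HImode (16), 2 -> SImode (32), 3 -> DImode (64) and
   4 -> TImode (128), on targets that provide such integer modes.  */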
5896 /* Expand the memory expression LOC and return the appropriate memory operand
5897 for the builtin_sync operations. */
5899 static rtx
5900 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5902 rtx addr, mem;
5904 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5906 /* Note that we explicitly do not want any alias information for this
5907 memory, so that we kill all other live memories. Otherwise we don't
5908 satisfy the full barrier semantics of the intrinsic. */
5909 mem = validize_mem (gen_rtx_MEM (mode, addr));
5911 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5912 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5913 MEM_VOLATILE_P (mem) = 1;
5915 return mem;
5918 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5919 EXP is the CALL_EXPR. CODE is the rtx code
5920 that corresponds to the arithmetic or logical operation from the name;
5921 an exception here is that NOT actually means NAND. TARGET is an optional
5922 place for us to store the results; AFTER is true if this is the
5923 xxx_and_fetch form. IGNORE is true if we don't actually care about
5924 the result of the operation at all. */
5926 static rtx
5927 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5928 enum rtx_code code, bool after,
5929 rtx target, bool ignore)
5931 rtx val, mem;
5932 enum machine_mode old_mode;
5934 /* Expand the operands. */
5935 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5937 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5938 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5939 of CONST_INTs, where we know the old_mode only from the call argument. */
5940 old_mode = GET_MODE (val);
5941 if (old_mode == VOIDmode)
5942 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5943 val = convert_modes (mode, old_mode, val, 1);
5945 if (ignore)
5946 return expand_sync_operation (mem, val, code);
5947 else
5948 return expand_sync_fetch_operation (mem, val, code, after, target);
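/* Illustrative example (not part of the original source): for

       int old = __sync_fetch_and_add (&counter, 1);

   MODE is SImode, CODE is PLUS and AFTER is false, so the expansion
   yields the value the memory held before the addition; the
   __sync_add_and_fetch form sets AFTER and returns the updated value
   instead.  If the result is unused, only the bare atomic add is
   emitted.  */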
5951 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5952 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5953 true if this is the boolean form. TARGET is a place for us to store the
5954 results; this is NOT optional if IS_BOOL is true. */
5956 static rtx
5957 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5958 bool is_bool, rtx target)
5960 rtx old_val, new_val, mem;
5961 enum machine_mode old_mode;
5963 /* Expand the operands. */
5964 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5967 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5968 mode, EXPAND_NORMAL);
5969 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5970 of CONST_INTs, where we know the old_mode only from the call argument. */
5971 old_mode = GET_MODE (old_val);
5972 if (old_mode == VOIDmode)
5973 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5974 old_val = convert_modes (mode, old_mode, old_val, 1);
5976 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5977 mode, EXPAND_NORMAL);
5978 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5979 of CONST_INTs, where we know the old_mode only from the call argument. */
5980 old_mode = GET_MODE (new_val);
5981 if (old_mode == VOIDmode)
5982 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5983 new_val = convert_modes (mode, old_mode, new_val, 1);
5985 if (is_bool)
5986 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5987 else
5988 return expand_val_compare_and_swap (mem, old_val, new_val, target);
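/* Illustrative example (not part of the original source): a lock-free
   update loop built on the two forms handled above, with int *p:

       int cur, next;
       do
         {
           cur = *p;
           next = cur + 1;
         }
       while (!__sync_bool_compare_and_swap (p, cur, next));

   The _val_ form instead returns the value that was in *p, which the
   caller can compare against CUR itself.  */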
5991 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5992 general form is actually an atomic exchange, and some targets only
5993 support a reduced form with the second argument being a constant 1.
5994 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5995 the results. */
5997 static rtx
5998 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5999 rtx target)
6001 rtx val, mem;
6002 enum machine_mode old_mode;
6004 /* Expand the operands. */
6005 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6006 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6007 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6008 of CONST_INTs, where we know the old_mode only from the call argument. */
6009 old_mode = GET_MODE (val);
6010 if (old_mode == VOIDmode)
6011 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6012 val = convert_modes (mode, old_mode, val, 1);
6014 return expand_sync_lock_test_and_set (mem, val, target);
6017 /* Expand the __sync_synchronize intrinsic. */
6019 static void
6020 expand_builtin_synchronize (void)
6022 tree x;
6024 #ifdef HAVE_memory_barrier
6025 if (HAVE_memory_barrier)
6027 emit_insn (gen_memory_barrier ());
6028 return;
6030 #endif
6032 if (synchronize_libfunc != NULL_RTX)
6034 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6035 return;
6038 /* If no explicit memory barrier instruction is available, create an
6039 empty asm stmt with a memory clobber. */
6040 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6041 tree_cons (NULL, build_string (6, "memory"), NULL));
6042 ASM_VOLATILE_P (x) = 1;
6043 expand_asm_expr (x);
6046 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6048 static void
6049 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6051 enum insn_code icode;
6052 rtx mem, insn;
6053 rtx val = const0_rtx;
6055 /* Expand the operands. */
6056 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6058 /* If there is an explicit operation in the md file, use it. */
6059 icode = sync_lock_release[mode];
6060 if (icode != CODE_FOR_nothing)
6062 if (!insn_data[icode].operand[1].predicate (val, mode))
6063 val = force_reg (mode, val);
6065 insn = GEN_FCN (icode) (mem, val);
6066 if (insn)
6068 emit_insn (insn);
6069 return;
6073 /* Otherwise we can implement this operation by emitting a barrier
6074 followed by a store of zero. */
6075 expand_builtin_synchronize ();
6076 emit_move_insn (mem, val);
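/* Illustrative example (not part of the original source): the classic
   spinlock built from the two primitives above.  Acquisition spins on
   the atomic exchange; release stores zero behind a barrier:

       while (__sync_lock_test_and_set (&lock, 1))
         ;
       ... critical section ...
       __sync_lock_release (&lock);  */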
6079 /* Expand an expression EXP that calls a built-in function,
6080 with result going to TARGET if that's convenient
6081 (and in mode MODE if that's convenient).
6082 SUBTARGET may be used as the target for computing one of EXP's operands.
6083 IGNORE is nonzero if the value is to be ignored. */
6085 rtx
6086 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6087 int ignore)
6089 tree fndecl = get_callee_fndecl (exp);
6090 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6091 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6093 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6094 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6096 /* When not optimizing, generate calls to library functions for a certain
6097 set of builtins. */
6098 if (!optimize
6099 && !called_as_built_in (fndecl)
6100 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6101 && fcode != BUILT_IN_ALLOCA)
6102 return expand_call (exp, target, ignore);
6104 /* The built-in function expanders test for target == const0_rtx
6105 to determine whether the function's result will be ignored. */
6106 if (ignore)
6107 target = const0_rtx;
6109 /* If the result of a pure or const built-in function is ignored, and
6110 none of its arguments are volatile, we can avoid expanding the
6111 built-in call and just evaluate the arguments for side-effects. */
6112 if (target == const0_rtx
6113 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6115 bool volatilep = false;
6116 tree arg;
6117 call_expr_arg_iterator iter;
6119 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6120 if (TREE_THIS_VOLATILE (arg))
6122 volatilep = true;
6123 break;
6126 if (! volatilep)
6128 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6129 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6130 return const0_rtx;
6134 switch (fcode)
6136 CASE_FLT_FN (BUILT_IN_FABS):
6137 target = expand_builtin_fabs (exp, target, subtarget);
6138 if (target)
6139 return target;
6140 break;
6142 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6143 target = expand_builtin_copysign (exp, target, subtarget);
6144 if (target)
6145 return target;
6146 break;
6148 /* Just do a normal library call if we were unable to fold
6149 the values. */
6150 CASE_FLT_FN (BUILT_IN_CABS):
6151 break;
6153 CASE_FLT_FN (BUILT_IN_EXP):
6154 CASE_FLT_FN (BUILT_IN_EXP10):
6155 CASE_FLT_FN (BUILT_IN_POW10):
6156 CASE_FLT_FN (BUILT_IN_EXP2):
6157 CASE_FLT_FN (BUILT_IN_EXPM1):
6158 CASE_FLT_FN (BUILT_IN_LOGB):
6159 CASE_FLT_FN (BUILT_IN_LOG):
6160 CASE_FLT_FN (BUILT_IN_LOG10):
6161 CASE_FLT_FN (BUILT_IN_LOG2):
6162 CASE_FLT_FN (BUILT_IN_LOG1P):
6163 CASE_FLT_FN (BUILT_IN_TAN):
6164 CASE_FLT_FN (BUILT_IN_ASIN):
6165 CASE_FLT_FN (BUILT_IN_ACOS):
6166 CASE_FLT_FN (BUILT_IN_ATAN):
6167 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6168 because of possible accuracy problems. */
6169 if (! flag_unsafe_math_optimizations)
6170 break;
6171 CASE_FLT_FN (BUILT_IN_SQRT):
6172 CASE_FLT_FN (BUILT_IN_FLOOR):
6173 CASE_FLT_FN (BUILT_IN_CEIL):
6174 CASE_FLT_FN (BUILT_IN_TRUNC):
6175 CASE_FLT_FN (BUILT_IN_ROUND):
6176 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6177 CASE_FLT_FN (BUILT_IN_RINT):
6178 target = expand_builtin_mathfn (exp, target, subtarget);
6179 if (target)
6180 return target;
6181 break;
6183 CASE_FLT_FN (BUILT_IN_ILOGB):
6184 if (! flag_unsafe_math_optimizations)
6185 break;
6186 CASE_FLT_FN (BUILT_IN_ISINF):
6187 CASE_FLT_FN (BUILT_IN_FINITE):
6188 case BUILT_IN_ISFINITE:
6189 case BUILT_IN_ISNORMAL:
6190 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6191 if (target)
6192 return target;
6193 break;
6195 CASE_FLT_FN (BUILT_IN_LCEIL):
6196 CASE_FLT_FN (BUILT_IN_LLCEIL):
6197 CASE_FLT_FN (BUILT_IN_LFLOOR):
6198 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6199 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6200 if (target)
6201 return target;
6202 break;
6204 CASE_FLT_FN (BUILT_IN_LRINT):
6205 CASE_FLT_FN (BUILT_IN_LLRINT):
6206 CASE_FLT_FN (BUILT_IN_LROUND):
6207 CASE_FLT_FN (BUILT_IN_LLROUND):
6208 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6209 if (target)
6210 return target;
6211 break;
6213 CASE_FLT_FN (BUILT_IN_POW):
6214 target = expand_builtin_pow (exp, target, subtarget);
6215 if (target)
6216 return target;
6217 break;
6219 CASE_FLT_FN (BUILT_IN_POWI):
6220 target = expand_builtin_powi (exp, target, subtarget);
6221 if (target)
6222 return target;
6223 break;
6225 CASE_FLT_FN (BUILT_IN_ATAN2):
6226 CASE_FLT_FN (BUILT_IN_LDEXP):
6227 CASE_FLT_FN (BUILT_IN_SCALB):
6228 CASE_FLT_FN (BUILT_IN_SCALBN):
6229 CASE_FLT_FN (BUILT_IN_SCALBLN):
6230 if (! flag_unsafe_math_optimizations)
6231 break;
6233 CASE_FLT_FN (BUILT_IN_FMOD):
6234 CASE_FLT_FN (BUILT_IN_REMAINDER):
6235 CASE_FLT_FN (BUILT_IN_DREM):
6236 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6237 if (target)
6238 return target;
6239 break;
6241 CASE_FLT_FN (BUILT_IN_CEXPI):
6242 target = expand_builtin_cexpi (exp, target, subtarget);
6243 gcc_assert (target);
6244 return target;
6246 CASE_FLT_FN (BUILT_IN_SIN):
6247 CASE_FLT_FN (BUILT_IN_COS):
6248 if (! flag_unsafe_math_optimizations)
6249 break;
6250 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6251 if (target)
6252 return target;
6253 break;
6255 CASE_FLT_FN (BUILT_IN_SINCOS):
6256 if (! flag_unsafe_math_optimizations)
6257 break;
6258 target = expand_builtin_sincos (exp);
6259 if (target)
6260 return target;
6261 break;
6263 case BUILT_IN_APPLY_ARGS:
6264 return expand_builtin_apply_args ();
6266 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6267 FUNCTION with a copy of the parameters described by
6268 ARGUMENTS, and ARGSIZE. It returns a block of memory
6269 allocated on the stack into which is stored all the registers
6270 that might possibly be used for returning the result of a
6271 function. ARGUMENTS is the value returned by
6272 __builtin_apply_args. ARGSIZE is the number of bytes of
6273 arguments that must be copied. ??? How should this value be
6274 computed? We'll also need a safe worst case value for varargs
6275 functions. */
6276 case BUILT_IN_APPLY:
6277 if (!validate_arglist (exp, POINTER_TYPE,
6278 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6279 && !validate_arglist (exp, REFERENCE_TYPE,
6280 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6281 return const0_rtx;
6282 else
6284 rtx ops[3];
6286 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6287 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6288 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6290 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6293 /* __builtin_return (RESULT) causes the function to return the
6294 value described by RESULT. RESULT is address of the block of
6295 memory returned by __builtin_apply. */
6296 case BUILT_IN_RETURN:
6297 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6298 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6299 return const0_rtx;
6301 case BUILT_IN_SAVEREGS:
6302 return expand_builtin_saveregs ();
6304 case BUILT_IN_ARGS_INFO:
6305 return expand_builtin_args_info (exp);
6307 case BUILT_IN_VA_ARG_PACK:
6308 /* All valid uses of __builtin_va_arg_pack () are removed during
6309 inlining. */
6310 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6311 return const0_rtx;
6313 case BUILT_IN_VA_ARG_PACK_LEN:
6314 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6315 inlining. */
6316 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6317 return const0_rtx;
6319 /* Return the address of the first anonymous stack arg. */
6320 case BUILT_IN_NEXT_ARG:
6321 if (fold_builtin_next_arg (exp, false))
6322 return const0_rtx;
6323 return expand_builtin_next_arg ();
6325 case BUILT_IN_CLEAR_CACHE:
6326 target = expand_builtin___clear_cache (exp);
6327 if (target)
6328 return target;
6329 break;
6331 case BUILT_IN_CLASSIFY_TYPE:
6332 return expand_builtin_classify_type (exp);
6334 case BUILT_IN_CONSTANT_P:
6335 return const0_rtx;
6337 case BUILT_IN_FRAME_ADDRESS:
6338 case BUILT_IN_RETURN_ADDRESS:
6339 return expand_builtin_frame_address (fndecl, exp);
6341 /* Returns the address of the area where the structure is returned.
6342 0 otherwise. */
6343 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6344 if (call_expr_nargs (exp) != 0
6345 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6346 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6347 return const0_rtx;
6348 else
6349 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6351 case BUILT_IN_ALLOCA:
6352 target = expand_builtin_alloca (exp, target);
6353 if (target)
6354 return target;
6355 break;
6357 case BUILT_IN_STACK_SAVE:
6358 return expand_stack_save ();
6360 case BUILT_IN_STACK_RESTORE:
6361 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6362 return const0_rtx;
6364 case BUILT_IN_BSWAP32:
6365 case BUILT_IN_BSWAP64:
6366 target = expand_builtin_bswap (exp, target, subtarget);
6368 if (target)
6369 return target;
6370 break;
6372 CASE_INT_FN (BUILT_IN_FFS):
6373 case BUILT_IN_FFSIMAX:
6374 target = expand_builtin_unop (target_mode, exp, target,
6375 subtarget, ffs_optab);
6376 if (target)
6377 return target;
6378 break;
6380 CASE_INT_FN (BUILT_IN_CLZ):
6381 case BUILT_IN_CLZIMAX:
6382 target = expand_builtin_unop (target_mode, exp, target,
6383 subtarget, clz_optab);
6384 if (target)
6385 return target;
6386 break;
6388 CASE_INT_FN (BUILT_IN_CTZ):
6389 case BUILT_IN_CTZIMAX:
6390 target = expand_builtin_unop (target_mode, exp, target,
6391 subtarget, ctz_optab);
6392 if (target)
6393 return target;
6394 break;
6396 CASE_INT_FN (BUILT_IN_POPCOUNT):
6397 case BUILT_IN_POPCOUNTIMAX:
6398 target = expand_builtin_unop (target_mode, exp, target,
6399 subtarget, popcount_optab);
6400 if (target)
6401 return target;
6402 break;
6404 CASE_INT_FN (BUILT_IN_PARITY):
6405 case BUILT_IN_PARITYIMAX:
6406 target = expand_builtin_unop (target_mode, exp, target,
6407 subtarget, parity_optab);
6408 if (target)
6409 return target;
6410 break;
6412 case BUILT_IN_STRLEN:
6413 target = expand_builtin_strlen (exp, target, target_mode);
6414 if (target)
6415 return target;
6416 break;
6418 case BUILT_IN_STRCPY:
6419 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6420 if (target)
6421 return target;
6422 break;
6424 case BUILT_IN_STRNCPY:
6425 target = expand_builtin_strncpy (exp, target, mode);
6426 if (target)
6427 return target;
6428 break;
6430 case BUILT_IN_STPCPY:
6431 target = expand_builtin_stpcpy (exp, target, mode);
6432 if (target)
6433 return target;
6434 break;
6436 case BUILT_IN_STRCAT:
6437 target = expand_builtin_strcat (fndecl, exp, target, mode);
6438 if (target)
6439 return target;
6440 break;
6442 case BUILT_IN_STRNCAT:
6443 target = expand_builtin_strncat (exp, target, mode);
6444 if (target)
6445 return target;
6446 break;
6448 case BUILT_IN_STRSPN:
6449 target = expand_builtin_strspn (exp, target, mode);
6450 if (target)
6451 return target;
6452 break;
6454 case BUILT_IN_STRCSPN:
6455 target = expand_builtin_strcspn (exp, target, mode);
6456 if (target)
6457 return target;
6458 break;
6460 case BUILT_IN_STRSTR:
6461 target = expand_builtin_strstr (exp, target, mode);
6462 if (target)
6463 return target;
6464 break;
6466 case BUILT_IN_STRPBRK:
6467 target = expand_builtin_strpbrk (exp, target, mode);
6468 if (target)
6469 return target;
6470 break;
6472 case BUILT_IN_INDEX:
6473 case BUILT_IN_STRCHR:
6474 target = expand_builtin_strchr (exp, target, mode);
6475 if (target)
6476 return target;
6477 break;
6479 case BUILT_IN_RINDEX:
6480 case BUILT_IN_STRRCHR:
6481 target = expand_builtin_strrchr (exp, target, mode);
6482 if (target)
6483 return target;
6484 break;
6486 case BUILT_IN_MEMCPY:
6487 target = expand_builtin_memcpy (exp, target, mode);
6488 if (target)
6489 return target;
6490 break;
6492 case BUILT_IN_MEMPCPY:
6493 target = expand_builtin_mempcpy (exp, target, mode);
6494 if (target)
6495 return target;
6496 break;
6498 case BUILT_IN_MEMMOVE:
6499 target = expand_builtin_memmove (exp, target, mode, ignore);
6500 if (target)
6501 return target;
6502 break;
6504 case BUILT_IN_BCOPY:
6505 target = expand_builtin_bcopy (exp, ignore);
6506 if (target)
6507 return target;
6508 break;
6510 case BUILT_IN_MEMSET:
6511 target = expand_builtin_memset (exp, target, mode);
6512 if (target)
6513 return target;
6514 break;
6516 case BUILT_IN_BZERO:
6517 target = expand_builtin_bzero (exp);
6518 if (target)
6519 return target;
6520 break;
6522 case BUILT_IN_STRCMP:
6523 target = expand_builtin_strcmp (exp, target, mode);
6524 if (target)
6525 return target;
6526 break;
6528 case BUILT_IN_STRNCMP:
6529 target = expand_builtin_strncmp (exp, target, mode);
6530 if (target)
6531 return target;
6532 break;
6534 case BUILT_IN_MEMCHR:
6535 target = expand_builtin_memchr (exp, target, mode);
6536 if (target)
6537 return target;
6538 break;
6540 case BUILT_IN_BCMP:
6541 case BUILT_IN_MEMCMP:
6542 target = expand_builtin_memcmp (exp, target, mode);
6543 if (target)
6544 return target;
6545 break;
6547 case BUILT_IN_SETJMP:
6548 /* This should have been lowered to the builtins below. */
6549 gcc_unreachable ();
6551 case BUILT_IN_SETJMP_SETUP:
6552 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6553 and the receiver label. */
6554 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6556 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6557 VOIDmode, EXPAND_NORMAL);
6558 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6559 rtx label_r = label_rtx (label);
6561 /* This is copied from the handling of non-local gotos. */
6562 expand_builtin_setjmp_setup (buf_addr, label_r);
6563 nonlocal_goto_handler_labels
6564 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6565 nonlocal_goto_handler_labels);
6566 /* ??? Do not let expand_label treat us as such since we would
6567 not want to be both on the list of non-local labels and on
6568 the list of forced labels. */
6569 FORCED_LABEL (label) = 0;
6570 return const0_rtx;
6572 break;
6574 case BUILT_IN_SETJMP_DISPATCHER:
6575 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6576 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6578 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6579 rtx label_r = label_rtx (label);
6581 /* Remove the dispatcher label from the list of non-local labels
6582 since the receiver labels have been added to it above. */
6583 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6584 return const0_rtx;
6586 break;
6588 case BUILT_IN_SETJMP_RECEIVER:
6589 /* __builtin_setjmp_receiver is passed the receiver label. */
6590 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6592 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6593 rtx label_r = label_rtx (label);
6595 expand_builtin_setjmp_receiver (label_r);
6596 return const0_rtx;
6598 break;
6600 /* __builtin_longjmp is passed a pointer to an array of five words.
6601 It's similar to the C library longjmp function but works with
6602 __builtin_setjmp above. */
6603 case BUILT_IN_LONGJMP:
6604 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6606 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6607 VOIDmode, EXPAND_NORMAL);
6608 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6610 if (value != const1_rtx)
6612 error ("%<__builtin_longjmp%> second argument must be 1");
6613 return const0_rtx;
6616 expand_builtin_longjmp (buf_addr, value);
6617 return const0_rtx;
6619 break;
6621 case BUILT_IN_NONLOCAL_GOTO:
6622 target = expand_builtin_nonlocal_goto (exp);
6623 if (target)
6624 return target;
6625 break;
6627 /* This updates the setjmp buffer that is its argument with the value
6628 of the current stack pointer. */
6629 case BUILT_IN_UPDATE_SETJMP_BUF:
6630 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6632 rtx buf_addr
6633 = expand_normal (CALL_EXPR_ARG (exp, 0));
6635 expand_builtin_update_setjmp_buf (buf_addr);
6636 return const0_rtx;
6638 break;
6640 case BUILT_IN_TRAP:
6641 expand_builtin_trap ();
6642 return const0_rtx;
6644 case BUILT_IN_PRINTF:
6645 target = expand_builtin_printf (exp, target, mode, false);
6646 if (target)
6647 return target;
6648 break;
6650 case BUILT_IN_PRINTF_UNLOCKED:
6651 target = expand_builtin_printf (exp, target, mode, true);
6652 if (target)
6653 return target;
6654 break;
6656 case BUILT_IN_FPUTS:
6657 target = expand_builtin_fputs (exp, target, false);
6658 if (target)
6659 return target;
6660 break;
6661 case BUILT_IN_FPUTS_UNLOCKED:
6662 target = expand_builtin_fputs (exp, target, true);
6663 if (target)
6664 return target;
6665 break;
6667 case BUILT_IN_FPRINTF:
6668 target = expand_builtin_fprintf (exp, target, mode, false);
6669 if (target)
6670 return target;
6671 break;
6673 case BUILT_IN_FPRINTF_UNLOCKED:
6674 target = expand_builtin_fprintf (exp, target, mode, true);
6675 if (target)
6676 return target;
6677 break;
6679 case BUILT_IN_SPRINTF:
6680 target = expand_builtin_sprintf (exp, target, mode);
6681 if (target)
6682 return target;
6683 break;
6685 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6686 case BUILT_IN_SIGNBITD32:
6687 case BUILT_IN_SIGNBITD64:
6688 case BUILT_IN_SIGNBITD128:
6689 target = expand_builtin_signbit (exp, target);
6690 if (target)
6691 return target;
6692 break;
6694 /* Various hooks for the DWARF 2 __throw routine. */
6695 case BUILT_IN_UNWIND_INIT:
6696 expand_builtin_unwind_init ();
6697 return const0_rtx;
6698 case BUILT_IN_DWARF_CFA:
6699 return virtual_cfa_rtx;
6700 #ifdef DWARF2_UNWIND_INFO
6701 case BUILT_IN_DWARF_SP_COLUMN:
6702 return expand_builtin_dwarf_sp_column ();
6703 case BUILT_IN_INIT_DWARF_REG_SIZES:
6704 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6705 return const0_rtx;
6706 #endif
6707 case BUILT_IN_FROB_RETURN_ADDR:
6708 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6709 case BUILT_IN_EXTRACT_RETURN_ADDR:
6710 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6711 case BUILT_IN_EH_RETURN:
6712 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6713 CALL_EXPR_ARG (exp, 1));
6714 return const0_rtx;
6715 #ifdef EH_RETURN_DATA_REGNO
6716 case BUILT_IN_EH_RETURN_DATA_REGNO:
6717 return expand_builtin_eh_return_data_regno (exp);
6718 #endif
6719 case BUILT_IN_EXTEND_POINTER:
6720 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6722 case BUILT_IN_VA_START:
6723 return expand_builtin_va_start (exp);
6724 case BUILT_IN_VA_END:
6725 return expand_builtin_va_end (exp);
6726 case BUILT_IN_VA_COPY:
6727 return expand_builtin_va_copy (exp);
6728 case BUILT_IN_EXPECT:
6729 return expand_builtin_expect (exp, target);
6730 case BUILT_IN_PREFETCH:
6731 expand_builtin_prefetch (exp);
6732 return const0_rtx;
6734 case BUILT_IN_PROFILE_FUNC_ENTER:
6735 return expand_builtin_profile_func (false);
6736 case BUILT_IN_PROFILE_FUNC_EXIT:
6737 return expand_builtin_profile_func (true);
6739 case BUILT_IN_INIT_TRAMPOLINE:
6740 return expand_builtin_init_trampoline (exp);
6741 case BUILT_IN_ADJUST_TRAMPOLINE:
6742 return expand_builtin_adjust_trampoline (exp);
6744 case BUILT_IN_FORK:
6745 case BUILT_IN_EXECL:
6746 case BUILT_IN_EXECV:
6747 case BUILT_IN_EXECLP:
6748 case BUILT_IN_EXECLE:
6749 case BUILT_IN_EXECVP:
6750 case BUILT_IN_EXECVE:
6751 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6752 if (target)
6753 return target;
6754 break;
6756 case BUILT_IN_FETCH_AND_ADD_1:
6757 case BUILT_IN_FETCH_AND_ADD_2:
6758 case BUILT_IN_FETCH_AND_ADD_4:
6759 case BUILT_IN_FETCH_AND_ADD_8:
6760 case BUILT_IN_FETCH_AND_ADD_16:
6761 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6762 target = expand_builtin_sync_operation (mode, exp, PLUS,
6763 false, target, ignore);
6764 if (target)
6765 return target;
6766 break;
6768 case BUILT_IN_FETCH_AND_SUB_1:
6769 case BUILT_IN_FETCH_AND_SUB_2:
6770 case BUILT_IN_FETCH_AND_SUB_4:
6771 case BUILT_IN_FETCH_AND_SUB_8:
6772 case BUILT_IN_FETCH_AND_SUB_16:
6773 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6774 target = expand_builtin_sync_operation (mode, exp, MINUS,
6775 false, target, ignore);
6776 if (target)
6777 return target;
6778 break;
6780 case BUILT_IN_FETCH_AND_OR_1:
6781 case BUILT_IN_FETCH_AND_OR_2:
6782 case BUILT_IN_FETCH_AND_OR_4:
6783 case BUILT_IN_FETCH_AND_OR_8:
6784 case BUILT_IN_FETCH_AND_OR_16:
6785 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6786 target = expand_builtin_sync_operation (mode, exp, IOR,
6787 false, target, ignore);
6788 if (target)
6789 return target;
6790 break;
6792 case BUILT_IN_FETCH_AND_AND_1:
6793 case BUILT_IN_FETCH_AND_AND_2:
6794 case BUILT_IN_FETCH_AND_AND_4:
6795 case BUILT_IN_FETCH_AND_AND_8:
6796 case BUILT_IN_FETCH_AND_AND_16:
6797 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6798 target = expand_builtin_sync_operation (mode, exp, AND,
6799 false, target, ignore);
6800 if (target)
6801 return target;
6802 break;
6804 case BUILT_IN_FETCH_AND_XOR_1:
6805 case BUILT_IN_FETCH_AND_XOR_2:
6806 case BUILT_IN_FETCH_AND_XOR_4:
6807 case BUILT_IN_FETCH_AND_XOR_8:
6808 case BUILT_IN_FETCH_AND_XOR_16:
6809 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6810 target = expand_builtin_sync_operation (mode, exp, XOR,
6811 false, target, ignore);
6812 if (target)
6813 return target;
6814 break;
6816 case BUILT_IN_FETCH_AND_NAND_1:
6817 case BUILT_IN_FETCH_AND_NAND_2:
6818 case BUILT_IN_FETCH_AND_NAND_4:
6819 case BUILT_IN_FETCH_AND_NAND_8:
6820 case BUILT_IN_FETCH_AND_NAND_16:
6821 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6822 target = expand_builtin_sync_operation (mode, exp, NOT,
6823 false, target, ignore);
6824 if (target)
6825 return target;
6826 break;
6828 case BUILT_IN_ADD_AND_FETCH_1:
6829 case BUILT_IN_ADD_AND_FETCH_2:
6830 case BUILT_IN_ADD_AND_FETCH_4:
6831 case BUILT_IN_ADD_AND_FETCH_8:
6832 case BUILT_IN_ADD_AND_FETCH_16:
6833 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6834 target = expand_builtin_sync_operation (mode, exp, PLUS,
6835 true, target, ignore);
6836 if (target)
6837 return target;
6838 break;
6840 case BUILT_IN_SUB_AND_FETCH_1:
6841 case BUILT_IN_SUB_AND_FETCH_2:
6842 case BUILT_IN_SUB_AND_FETCH_4:
6843 case BUILT_IN_SUB_AND_FETCH_8:
6844 case BUILT_IN_SUB_AND_FETCH_16:
6845 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6846 target = expand_builtin_sync_operation (mode, exp, MINUS,
6847 true, target, ignore);
6848 if (target)
6849 return target;
6850 break;
6852 case BUILT_IN_OR_AND_FETCH_1:
6853 case BUILT_IN_OR_AND_FETCH_2:
6854 case BUILT_IN_OR_AND_FETCH_4:
6855 case BUILT_IN_OR_AND_FETCH_8:
6856 case BUILT_IN_OR_AND_FETCH_16:
6857 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6858 target = expand_builtin_sync_operation (mode, exp, IOR,
6859 true, target, ignore);
6860 if (target)
6861 return target;
6862 break;
6864 case BUILT_IN_AND_AND_FETCH_1:
6865 case BUILT_IN_AND_AND_FETCH_2:
6866 case BUILT_IN_AND_AND_FETCH_4:
6867 case BUILT_IN_AND_AND_FETCH_8:
6868 case BUILT_IN_AND_AND_FETCH_16:
6869 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6870 target = expand_builtin_sync_operation (mode, exp, AND,
6871 true, target, ignore);
6872 if (target)
6873 return target;
6874 break;
6876 case BUILT_IN_XOR_AND_FETCH_1:
6877 case BUILT_IN_XOR_AND_FETCH_2:
6878 case BUILT_IN_XOR_AND_FETCH_4:
6879 case BUILT_IN_XOR_AND_FETCH_8:
6880 case BUILT_IN_XOR_AND_FETCH_16:
6881 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6882 target = expand_builtin_sync_operation (mode, exp, XOR,
6883 true, target, ignore);
6884 if (target)
6885 return target;
6886 break;
6888 case BUILT_IN_NAND_AND_FETCH_1:
6889 case BUILT_IN_NAND_AND_FETCH_2:
6890 case BUILT_IN_NAND_AND_FETCH_4:
6891 case BUILT_IN_NAND_AND_FETCH_8:
6892 case BUILT_IN_NAND_AND_FETCH_16:
6893 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6894 target = expand_builtin_sync_operation (mode, exp, NOT,
6895 true, target, ignore);
6896 if (target)
6897 return target;
6898 break;
6900 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6901 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6902 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6903 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6904 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6905 if (mode == VOIDmode)
6906 mode = TYPE_MODE (boolean_type_node);
6907 if (!target || !register_operand (target, mode))
6908 target = gen_reg_rtx (mode);
6910 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6911 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6912 if (target)
6913 return target;
6914 break;
6916 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6917 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6918 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6919 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6920 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6921 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6922 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6923 if (target)
6924 return target;
6925 break;
6927 case BUILT_IN_LOCK_TEST_AND_SET_1:
6928 case BUILT_IN_LOCK_TEST_AND_SET_2:
6929 case BUILT_IN_LOCK_TEST_AND_SET_4:
6930 case BUILT_IN_LOCK_TEST_AND_SET_8:
6931 case BUILT_IN_LOCK_TEST_AND_SET_16:
6932 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6933 target = expand_builtin_lock_test_and_set (mode, exp, target);
6934 if (target)
6935 return target;
6936 break;
6938 case BUILT_IN_LOCK_RELEASE_1:
6939 case BUILT_IN_LOCK_RELEASE_2:
6940 case BUILT_IN_LOCK_RELEASE_4:
6941 case BUILT_IN_LOCK_RELEASE_8:
6942 case BUILT_IN_LOCK_RELEASE_16:
6943 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6944 expand_builtin_lock_release (mode, exp);
6945 return const0_rtx;
6947 case BUILT_IN_SYNCHRONIZE:
6948 expand_builtin_synchronize ();
6949 return const0_rtx;
6951 case BUILT_IN_OBJECT_SIZE:
6952 return expand_builtin_object_size (exp);
6954 case BUILT_IN_MEMCPY_CHK:
6955 case BUILT_IN_MEMPCPY_CHK:
6956 case BUILT_IN_MEMMOVE_CHK:
6957 case BUILT_IN_MEMSET_CHK:
6958 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6959 if (target)
6960 return target;
6961 break;
6963 case BUILT_IN_STRCPY_CHK:
6964 case BUILT_IN_STPCPY_CHK:
6965 case BUILT_IN_STRNCPY_CHK:
6966 case BUILT_IN_STRCAT_CHK:
6967 case BUILT_IN_STRNCAT_CHK:
6968 case BUILT_IN_SNPRINTF_CHK:
6969 case BUILT_IN_VSNPRINTF_CHK:
6970 maybe_emit_chk_warning (exp, fcode);
6971 break;
6973 case BUILT_IN_SPRINTF_CHK:
6974 case BUILT_IN_VSPRINTF_CHK:
6975 maybe_emit_sprintf_chk_warning (exp, fcode);
6976 break;
6978 default: /* just do library call, if unknown builtin */
6979 break;
6982 /* The switch statement above can drop through to cause the function
6983 to be called normally. */
6984 return expand_call (exp, target, ignore);
6987 /* Determine whether a tree node represents a call to a built-in
6988 function. If the tree T is a call to a built-in function with
6989 the right number of arguments of the appropriate types, return
6990 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6991 Otherwise the return value is END_BUILTINS. */
6993 enum built_in_function
6994 builtin_mathfn_code (const_tree t)
6996 const_tree fndecl, arg, parmlist;
6997 const_tree argtype, parmtype;
6998 const_call_expr_arg_iterator iter;
7000 if (TREE_CODE (t) != CALL_EXPR
7001 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7002 return END_BUILTINS;
7004 fndecl = get_callee_fndecl (t);
7005 if (fndecl == NULL_TREE
7006 || TREE_CODE (fndecl) != FUNCTION_DECL
7007 || ! DECL_BUILT_IN (fndecl)
7008 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7009 return END_BUILTINS;
7011 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7012 init_const_call_expr_arg_iterator (t, &iter);
7013 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7015 /* If a function doesn't take a variable number of arguments,
7016 the last element in the list will have type `void'. */
7017 parmtype = TREE_VALUE (parmlist);
7018 if (VOID_TYPE_P (parmtype))
7020 if (more_const_call_expr_args_p (&iter))
7021 return END_BUILTINS;
7022 return DECL_FUNCTION_CODE (fndecl);
7025 if (! more_const_call_expr_args_p (&iter))
7026 return END_BUILTINS;
7028 arg = next_const_call_expr_arg (&iter);
7029 argtype = TREE_TYPE (arg);
7031 if (SCALAR_FLOAT_TYPE_P (parmtype))
7033 if (! SCALAR_FLOAT_TYPE_P (argtype))
7034 return END_BUILTINS;
7036 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7038 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7039 return END_BUILTINS;
7041 else if (POINTER_TYPE_P (parmtype))
7043 if (! POINTER_TYPE_P (argtype))
7044 return END_BUILTINS;
7046 else if (INTEGRAL_TYPE_P (parmtype))
7048 if (! INTEGRAL_TYPE_P (argtype))
7049 return END_BUILTINS;
7051 else
7052 return END_BUILTINS;
7055 /* Variable-length argument list. */
7056 return DECL_FUNCTION_CODE (fndecl);
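/* Illustrative example (not part of the original source): for a call to
   the builtin sqrt with a double argument this returns BUILT_IN_SQRT;
   for sqrt ("foo") the pointer argument fails the scalar-float check
   above and END_BUILTINS is returned instead.  */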
7059 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7060 evaluate to a constant. */
7062 static tree
7063 fold_builtin_constant_p (tree arg)
7065 /* We return 1 for a numeric type that's known to be a constant
7066 value at compile-time or for an aggregate type that's a
7067 literal constant. */
7068 STRIP_NOPS (arg);
7070 /* If we know this is a constant, return the constant one. */
7071 if (CONSTANT_CLASS_P (arg)
7072 || (TREE_CODE (arg) == CONSTRUCTOR
7073 && TREE_CONSTANT (arg)))
7074 return integer_one_node;
7075 if (TREE_CODE (arg) == ADDR_EXPR)
7077 tree op = TREE_OPERAND (arg, 0);
7078 if (TREE_CODE (op) == STRING_CST
7079 || (TREE_CODE (op) == ARRAY_REF
7080 && integer_zerop (TREE_OPERAND (op, 1))
7081 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7082 return integer_one_node;
7085 /* If this expression has side effects, show we don't know it to be a
7086 constant. Likewise if it's a pointer or aggregate type since in
7087 those cases we only want literals, since those are only optimized
7088 when generating RTL, not later.
7089 And finally, if we are compiling an initializer, not code, we
7090 need to return a definite result now; there's not going to be any
7091 more optimization done. */
7092 if (TREE_SIDE_EFFECTS (arg)
7093 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7094 || POINTER_TYPE_P (TREE_TYPE (arg))
7095 || cfun == 0
7096 || folding_initializer)
7097 return integer_zero_node;
7099 return NULL_TREE;
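/* Illustrative examples (not part of the original source):

       __builtin_constant_p (3)       folds to 1 here;
       __builtin_constant_p ("abc")   folds to 1 here;
       __builtin_constant_p (x + y)   folds to 0 only once it is clear
                                      no later optimization can make the
                                      argument constant (e.g. in an
                                      initializer), and otherwise is
                                      left for RTL to resolve.  */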
7102 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7103 return it as a truthvalue. */
7105 static tree
7106 build_builtin_expect_predicate (tree pred, tree expected)
7108 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7110 fn = built_in_decls[BUILT_IN_EXPECT];
7111 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7112 ret_type = TREE_TYPE (TREE_TYPE (fn));
7113 pred_type = TREE_VALUE (arg_types);
7114 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7116 pred = fold_convert (pred_type, pred);
7117 expected = fold_convert (expected_type, expected);
7118 call_expr = build_call_expr (fn, 2, pred, expected);
7120 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7121 build_int_cst (ret_type, 0));
7124 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7125 NULL_TREE if no simplification is possible. */
7127 static tree
7128 fold_builtin_expect (tree arg0, tree arg1)
7130 tree inner, fndecl;
7131 enum tree_code code;
7133 /* If this is a builtin_expect within a builtin_expect keep the
7134 inner one. See through a comparison against a constant. It
7135 might have been added to create a truthvalue. */
7136 inner = arg0;
7137 if (COMPARISON_CLASS_P (inner)
7138 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7139 inner = TREE_OPERAND (inner, 0);
7141 if (TREE_CODE (inner) == CALL_EXPR
7142 && (fndecl = get_callee_fndecl (inner))
7143 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7144 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7145 return arg0;
7147 /* Distribute the expected value over short-circuiting operators.
7148 See through the cast from truthvalue_type_node to long. */
7149 inner = arg0;
7150 while (TREE_CODE (inner) == NOP_EXPR
7151 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7152 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7153 inner = TREE_OPERAND (inner, 0);
7155 code = TREE_CODE (inner);
7156 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7158 tree op0 = TREE_OPERAND (inner, 0);
7159 tree op1 = TREE_OPERAND (inner, 1);
7161 op0 = build_builtin_expect_predicate (op0, arg1);
7162 op1 = build_builtin_expect_predicate (op1, arg1);
7163 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7165 return fold_convert (TREE_TYPE (arg0), inner);
7168 /* If the argument isn't invariant then there's nothing else we can do. */
7169 if (!TREE_CONSTANT (arg0))
7170 return NULL_TREE;
7172 /* If we expect that a comparison against the argument will fold to
7173 a constant return the constant. In practice, this means a true
7174 constant or the address of a non-weak symbol. */
7175 inner = arg0;
7176 STRIP_NOPS (inner);
7177 if (TREE_CODE (inner) == ADDR_EXPR)
7181 inner = TREE_OPERAND (inner, 0);
7183 while (TREE_CODE (inner) == COMPONENT_REF
7184 || TREE_CODE (inner) == ARRAY_REF);
7185 if (DECL_P (inner) && DECL_WEAK (inner))
7186 return NULL_TREE;
7189 /* Otherwise, ARG0 already has the proper type for the return value. */
7190 return arg0;
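
/* An illustrative, build-excluded sketch of the distribution above; the
   function below is hypothetical.  */
#if 0
static int
expect_example (int p, int q)
{
  /* This condition is rewritten roughly as if it had been written
       if (__builtin_expect (p > 0, 1) && __builtin_expect (q > 0, 1))
     so each arm of the short-circuit carries the expected value.  */
  if (__builtin_expect (p > 0 && q > 0, 1))
    return 1;
  return 0;
}
#endif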
7193 /* Fold a call to __builtin_classify_type with argument ARG. */
7195 static tree
7196 fold_builtin_classify_type (tree arg)
7198 if (arg == 0)
7199 return build_int_cst (NULL_TREE, no_type_class);
7201 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7204 /* Fold a call to __builtin_strlen with argument ARG. */
7206 static tree
7207 fold_builtin_strlen (tree arg)
7209 if (!validate_arg (arg, POINTER_TYPE))
7210 return NULL_TREE;
7211 else
7213 tree len = c_strlen (arg, 0);
7215 if (len)
7217 /* Convert from the internal "sizetype" type to "size_t". */
7218 if (size_type_node)
7219 len = fold_convert (size_type_node, len);
7220 return len;
7223 return NULL_TREE;
7227 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7229 static tree
7230 fold_builtin_inf (tree type, int warn)
7232 REAL_VALUE_TYPE real;
7234 /* __builtin_inff is intended to be usable to define INFINITY on all
7235 targets. If an infinity is not available, INFINITY expands "to a
7236 positive constant of type float that overflows at translation
7237 time", footnote "In this case, using INFINITY will violate the
7238 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7239 Thus we pedwarn to ensure this constraint violation is
7240 diagnosed. */
7241 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7242 pedwarn ("target format does not support infinity");
7244 real_inf (&real);
7245 return build_real (type, real);
7248 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7250 static tree
7251 fold_builtin_nan (tree arg, tree type, int quiet)
7253 REAL_VALUE_TYPE real;
7254 const char *str;
7256 if (!validate_arg (arg, POINTER_TYPE))
7257 return NULL_TREE;
7258 str = c_getstr (arg);
7259 if (!str)
7260 return NULL_TREE;
7262 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7263 return NULL_TREE;
7265 return build_real (type, real);
7268 /* Return true if the floating point expression T has an integer value.
7269 We also allow +Inf, -Inf and NaN to be considered integer values. */
7271 static bool
7272 integer_valued_real_p (tree t)
7274 switch (TREE_CODE (t))
7276 case FLOAT_EXPR:
7277 return true;
7279 case ABS_EXPR:
7280 case SAVE_EXPR:
7281 return integer_valued_real_p (TREE_OPERAND (t, 0));
7283 case COMPOUND_EXPR:
7284 case MODIFY_EXPR:
7285 case BIND_EXPR:
7286 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7288 case PLUS_EXPR:
7289 case MINUS_EXPR:
7290 case MULT_EXPR:
7291 case MIN_EXPR:
7292 case MAX_EXPR:
7293 return integer_valued_real_p (TREE_OPERAND (t, 0))
7294 && integer_valued_real_p (TREE_OPERAND (t, 1));
7296 case COND_EXPR:
7297 return integer_valued_real_p (TREE_OPERAND (t, 1))
7298 && integer_valued_real_p (TREE_OPERAND (t, 2));
7300 case REAL_CST:
7301 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7303 case NOP_EXPR:
7305 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7306 if (TREE_CODE (type) == INTEGER_TYPE)
7307 return true;
7308 if (TREE_CODE (type) == REAL_TYPE)
7309 return integer_valued_real_p (TREE_OPERAND (t, 0));
7310 break;
7313 case CALL_EXPR:
7314 switch (builtin_mathfn_code (t))
7316 CASE_FLT_FN (BUILT_IN_CEIL):
7317 CASE_FLT_FN (BUILT_IN_FLOOR):
7318 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7319 CASE_FLT_FN (BUILT_IN_RINT):
7320 CASE_FLT_FN (BUILT_IN_ROUND):
7321 CASE_FLT_FN (BUILT_IN_TRUNC):
7322 return true;
7324 CASE_FLT_FN (BUILT_IN_FMIN):
7325 CASE_FLT_FN (BUILT_IN_FMAX):
7326 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7327 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7329 default:
7330 break;
7332 break;
7334 default:
7335 break;
7337 return false;
7340 /* FNDECL is assumed to be a builtin where truncation can be propagated
7341    across (for instance floor((double)f) == (double)floorf (f)).
7342 Do the transformation for a call with argument ARG. */
7344 static tree
7345 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7347 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7349 if (!validate_arg (arg, REAL_TYPE))
7350 return NULL_TREE;
7352 /* Integer rounding functions are idempotent. */
7353 if (fcode == builtin_mathfn_code (arg))
7354 return arg;
7356 /* If argument is already integer valued, and we don't need to worry
7357 about setting errno, there's no need to perform rounding. */
7358 if (! flag_errno_math && integer_valued_real_p (arg))
7359 return arg;
7361 if (optimize)
7363 tree arg0 = strip_float_extensions (arg);
7364 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7365 tree newtype = TREE_TYPE (arg0);
7366 tree decl;
7368 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7369 && (decl = mathfn_built_in (newtype, fcode)))
7370 return fold_convert (ftype,
7371 build_call_expr (decl, 1,
7372 fold_convert (newtype, arg0)));
7374 return NULL_TREE;
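
/* A hypothetical, build-excluded sketch of the truncation-transparent
   folding above; it assumes optimization is enabled.  */
#if 0
static double
trunc_transparent_examples (float f, double d)
{
  double a = __builtin_floor ((double) f);          /* -> (double) floorf (f).  */
  double b = __builtin_trunc (__builtin_trunc (d)); /* -> trunc (d): idempotent.  */
  return a + b;
}
#endif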
7377 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7378 the argument, for instance lround((double)f) -> lroundf (f).
7379 Do the transformation for a call with argument ARG. */
7381 static tree
7382 fold_fixed_mathfn (tree fndecl, tree arg)
7384 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7386 if (!validate_arg (arg, REAL_TYPE))
7387 return NULL_TREE;
7389 /* If argument is already integer valued, and we don't need to worry
7390 about setting errno, there's no need to perform rounding. */
7391 if (! flag_errno_math && integer_valued_real_p (arg))
7392 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7394 if (optimize)
7396 tree ftype = TREE_TYPE (arg);
7397 tree arg0 = strip_float_extensions (arg);
7398 tree newtype = TREE_TYPE (arg0);
7399 tree decl;
7401 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7402 && (decl = mathfn_built_in (newtype, fcode)))
7403 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7406   /* Canonicalize llceil, llfloor, llround and llrint to their l*
7407      counterparts on targets (such as LP64) where sizeof (long long) == sizeof (long).  */
7408 if (TYPE_PRECISION (long_long_integer_type_node)
7409 == TYPE_PRECISION (long_integer_type_node))
7411 tree newfn = NULL_TREE;
7412 switch (fcode)
7414 CASE_FLT_FN (BUILT_IN_LLCEIL):
7415 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7416 break;
7418 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7419 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7420 break;
7422 CASE_FLT_FN (BUILT_IN_LLROUND):
7423 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7424 break;
7426 CASE_FLT_FN (BUILT_IN_LLRINT):
7427 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7428 break;
7430 default:
7431 break;
7434 if (newfn)
7436 	  tree newcall = build_call_expr (newfn, 1, arg);
7437 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7441 return NULL_TREE;
7444 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7445 return type. Return NULL_TREE if no simplification can be made. */
7447 static tree
7448 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7450 tree res;
7452 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7453 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7454 return NULL_TREE;
7456 /* Calculate the result when the argument is a constant. */
7457 if (TREE_CODE (arg) == COMPLEX_CST
7458 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7459 type, mpfr_hypot)))
7460 return res;
7462 if (TREE_CODE (arg) == COMPLEX_EXPR)
7464 tree real = TREE_OPERAND (arg, 0);
7465 tree imag = TREE_OPERAND (arg, 1);
7467 /* If either part is zero, cabs is fabs of the other. */
7468 if (real_zerop (real))
7469 return fold_build1 (ABS_EXPR, type, imag);
7470 if (real_zerop (imag))
7471 return fold_build1 (ABS_EXPR, type, real);
7473       /* cabs(x+x*i) -> fabs(x)*sqrt(2).  */
7474 if (flag_unsafe_math_optimizations
7475 && operand_equal_p (real, imag, OEP_PURE_SAME))
7477 const REAL_VALUE_TYPE sqrt2_trunc
7478 = real_value_truncate (TYPE_MODE (type),
7479 *get_real_const (rv_sqrt2));
7480 STRIP_NOPS (real);
7481 return fold_build2 (MULT_EXPR, type,
7482 fold_build1 (ABS_EXPR, type, real),
7483 build_real (type, sqrt2_trunc));
7487 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7488 if (TREE_CODE (arg) == NEGATE_EXPR
7489 || TREE_CODE (arg) == CONJ_EXPR)
7490 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7492 /* Don't do this when optimizing for size. */
7493 if (flag_unsafe_math_optimizations
7494 && optimize && !optimize_size)
7496 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7498 if (sqrtfn != NULL_TREE)
7500 tree rpart, ipart, result;
7502 arg = builtin_save_expr (arg);
7504 rpart = fold_build1 (REALPART_EXPR, type, arg);
7505 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7507 rpart = builtin_save_expr (rpart);
7508 ipart = builtin_save_expr (ipart);
7510 result = fold_build2 (PLUS_EXPR, type,
7511 fold_build2 (MULT_EXPR, type,
7512 rpart, rpart),
7513 fold_build2 (MULT_EXPR, type,
7514 ipart, ipart));
7516 return build_call_expr (sqrtfn, 1, result);
7520 return NULL_TREE;
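
/* A build-excluded, illustrative sketch of the cabs foldings above; the
   sqrt-based expansion additionally needs -funsafe-math-optimizations and
   is skipped when optimizing for size.  */
#if 0
static double
cabs_examples (__complex__ double z)
{
  double a = __builtin_cabs (-z); /* -> cabs (z).  */
  double b = __builtin_cabs (z);  /* -> sqrt (real*real + imag*imag).  */
  return a + b;
}
#endif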
7523 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7524 Return NULL_TREE if no simplification can be made. */
7526 static tree
7527 fold_builtin_sqrt (tree arg, tree type)
7530 enum built_in_function fcode;
7531 tree res;
7533 if (!validate_arg (arg, REAL_TYPE))
7534 return NULL_TREE;
7536 /* Calculate the result when the argument is a constant. */
7537 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7538 return res;
7540 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7541 fcode = builtin_mathfn_code (arg);
7542 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7544 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7545 arg = fold_build2 (MULT_EXPR, type,
7546 CALL_EXPR_ARG (arg, 0),
7547 build_real (type, dconsthalf));
7548 return build_call_expr (expfn, 1, arg);
7551 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7552 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7554 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7556 if (powfn)
7558 tree arg0 = CALL_EXPR_ARG (arg, 0);
7559 tree tree_root;
7560 /* The inner root was either sqrt or cbrt. */
7561 REAL_VALUE_TYPE dconstroot =
7562 BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
7564 /* Adjust for the outer root. */
7565 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7566 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7567 tree_root = build_real (type, dconstroot);
7568 return build_call_expr (powfn, 2, arg0, tree_root);
7572 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7573 if (flag_unsafe_math_optimizations
7574 && (fcode == BUILT_IN_POW
7575 || fcode == BUILT_IN_POWF
7576 || fcode == BUILT_IN_POWL))
7578 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7579 tree arg0 = CALL_EXPR_ARG (arg, 0);
7580 tree arg1 = CALL_EXPR_ARG (arg, 1);
7581 tree narg1;
7582 if (!tree_expr_nonnegative_p (arg0))
7583 arg0 = build1 (ABS_EXPR, type, arg0);
7584 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7585 build_real (type, dconsthalf));
7586 return build_call_expr (powfn, 2, arg0, narg1);
7589 return NULL_TREE;
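
/* A hypothetical, build-excluded sketch of the sqrt foldings above; all of
   them require -funsafe-math-optimizations.  */
#if 0
static double
sqrt_examples (double x, double y)
{
  double a = __builtin_sqrt (__builtin_exp (x));    /* -> exp (x * 0.5).  */
  double b = __builtin_sqrt (__builtin_sqrt (x));   /* -> pow (x, 0.25).  */
  double c = __builtin_sqrt (__builtin_pow (x, y)); /* -> pow (fabs (x), y * 0.5).  */
  return a + b + c;
}
#endif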
7592 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7593 Return NULL_TREE if no simplification can be made. */
7595 static tree
7596 fold_builtin_cbrt (tree arg, tree type)
7598 const enum built_in_function fcode = builtin_mathfn_code (arg);
7599 tree res;
7601 if (!validate_arg (arg, REAL_TYPE))
7602 return NULL_TREE;
7604 /* Calculate the result when the argument is a constant. */
7605 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7606 return res;
7608 if (flag_unsafe_math_optimizations)
7610 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7611 if (BUILTIN_EXPONENT_P (fcode))
7613 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7614 const REAL_VALUE_TYPE third_trunc =
7615 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
7616 arg = fold_build2 (MULT_EXPR, type,
7617 CALL_EXPR_ARG (arg, 0),
7618 build_real (type, third_trunc));
7619 return build_call_expr (expfn, 1, arg);
7622 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7623 if (BUILTIN_SQRT_P (fcode))
7625 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7627 if (powfn)
7629 tree arg0 = CALL_EXPR_ARG (arg, 0);
7630 tree tree_root;
7631 REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);
7633 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7634 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7635 tree_root = build_real (type, dconstroot);
7636 return build_call_expr (powfn, 2, arg0, tree_root);
7640 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7641 if (BUILTIN_CBRT_P (fcode))
7643 tree arg0 = CALL_EXPR_ARG (arg, 0);
7644 if (tree_expr_nonnegative_p (arg0))
7646 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7648 if (powfn)
7650 tree tree_root;
7651 REAL_VALUE_TYPE dconstroot;
7653 real_arithmetic (&dconstroot, MULT_EXPR,
7654 get_real_const (rv_third),
7655 get_real_const (rv_third));
7656 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7657 tree_root = build_real (type, dconstroot);
7658 return build_call_expr (powfn, 2, arg0, tree_root);
7663 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7664 if (fcode == BUILT_IN_POW
7665 || fcode == BUILT_IN_POWF
7666 || fcode == BUILT_IN_POWL)
7668 tree arg00 = CALL_EXPR_ARG (arg, 0);
7669 tree arg01 = CALL_EXPR_ARG (arg, 1);
7670 if (tree_expr_nonnegative_p (arg00))
7672 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7673 const REAL_VALUE_TYPE dconstroot
7674 = real_value_truncate (TYPE_MODE (type),
7675 *get_real_const (rv_third));
7676 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7677 build_real (type, dconstroot));
7678 return build_call_expr (powfn, 2, arg00, narg01);
7682 return NULL_TREE;
7685 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7686 TYPE is the type of the return value. Return NULL_TREE if no
7687 simplification can be made. */
7689 static tree
7690 fold_builtin_cos (tree arg, tree type, tree fndecl)
7692 tree res, narg;
7694 if (!validate_arg (arg, REAL_TYPE))
7695 return NULL_TREE;
7697 /* Calculate the result when the argument is a constant. */
7698 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7699 return res;
7701 /* Optimize cos(-x) into cos (x). */
7702 if ((narg = fold_strip_sign_ops (arg)))
7703 return build_call_expr (fndecl, 1, narg);
7705 return NULL_TREE;
7708 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7709 Return NULL_TREE if no simplification can be made. */
7711 static tree
7712 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7714 if (validate_arg (arg, REAL_TYPE))
7716 tree res, narg;
7718 /* Calculate the result when the argument is a constant. */
7719 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7720 return res;
7722 /* Optimize cosh(-x) into cosh (x). */
7723 if ((narg = fold_strip_sign_ops (arg)))
7724 return build_call_expr (fndecl, 1, narg);
7727 return NULL_TREE;
7730 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7731 Return NULL_TREE if no simplification can be made. */
7733 static tree
7734 fold_builtin_tan (tree arg, tree type)
7736 enum built_in_function fcode;
7737 tree res;
7739 if (!validate_arg (arg, REAL_TYPE))
7740 return NULL_TREE;
7742 /* Calculate the result when the argument is a constant. */
7743 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7744 return res;
7746 /* Optimize tan(atan(x)) = x. */
7747 fcode = builtin_mathfn_code (arg);
7748 if (flag_unsafe_math_optimizations
7749 && (fcode == BUILT_IN_ATAN
7750 || fcode == BUILT_IN_ATANF
7751 || fcode == BUILT_IN_ATANL))
7752 return CALL_EXPR_ARG (arg, 0);
7754 return NULL_TREE;
7757 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7758 NULL_TREE if no simplification can be made. */
7760 static tree
7761 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7763 tree type;
7764 tree res, fn, call;
7766 if (!validate_arg (arg0, REAL_TYPE)
7767 || !validate_arg (arg1, POINTER_TYPE)
7768 || !validate_arg (arg2, POINTER_TYPE))
7769 return NULL_TREE;
7771 type = TREE_TYPE (arg0);
7773 /* Calculate the result when the argument is a constant. */
7774 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7775 return res;
7777 /* Canonicalize sincos to cexpi. */
7778 if (!TARGET_C99_FUNCTIONS)
7779 return NULL_TREE;
7780 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7781 if (!fn)
7782 return NULL_TREE;
7784 call = build_call_expr (fn, 1, arg0);
7785 call = builtin_save_expr (call);
7787 return build2 (COMPOUND_EXPR, type,
7788 build2 (MODIFY_EXPR, void_type_node,
7789 build_fold_indirect_ref (arg1),
7790 build1 (IMAGPART_EXPR, type, call)),
7791 build2 (MODIFY_EXPR, void_type_node,
7792 build_fold_indirect_ref (arg2),
7793 build1 (REALPART_EXPR, type, call)));
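
/* An illustrative, build-excluded sketch of the sincos canonicalization
   above; it only applies when TARGET_C99_FUNCTIONS holds.  */
#if 0
static void
sincos_example (double x, double *sinp, double *cosp)
{
  /* The call below becomes roughly
       __complex__ double t = __builtin_cexpi (x);
       *sinp = __imag__ t;
       *cosp = __real__ t;
     so one cexpi call feeds both results.  */
  __builtin_sincos (x, sinp, cosp);
}
#endif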
7796 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7797 NULL_TREE if no simplification can be made. */
7799 static tree
7800 fold_builtin_cexp (tree arg0, tree type)
7802 tree rtype;
7803 tree realp, imagp, ifn;
7805 if (!validate_arg (arg0, COMPLEX_TYPE))
7806 return NULL_TREE;
7808 rtype = TREE_TYPE (TREE_TYPE (arg0));
7810   /* If we can determine the real part of ARG0 and it is constant zero,
7811      fold to cexpi.  */
7812 if (!TARGET_C99_FUNCTIONS)
7813 return NULL_TREE;
7814 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7815 if (!ifn)
7816 return NULL_TREE;
7818 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7819 && real_zerop (realp))
7821 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7822 return build_call_expr (ifn, 1, narg);
7825   /* If we can easily decompose the real and imaginary parts, split cexp
7826      into exp (r) * cexpi (i).  */
7827 if (flag_unsafe_math_optimizations
7828 && realp)
7830 tree rfn, rcall, icall;
7832 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7833 if (!rfn)
7834 return NULL_TREE;
7836 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7837 if (!imagp)
7838 return NULL_TREE;
7840 icall = build_call_expr (ifn, 1, imagp);
7841 icall = builtin_save_expr (icall);
7842 rcall = build_call_expr (rfn, 1, realp);
7843 rcall = builtin_save_expr (rcall);
7844 return fold_build2 (COMPLEX_EXPR, type,
7845 fold_build2 (MULT_EXPR, rtype,
7846 rcall,
7847 fold_build1 (REALPART_EXPR, rtype, icall)),
7848 fold_build2 (MULT_EXPR, rtype,
7849 rcall,
7850 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7853 return NULL_TREE;
7856 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7857 Return NULL_TREE if no simplification can be made. */
7859 static tree
7860 fold_builtin_trunc (tree fndecl, tree arg)
7862 if (!validate_arg (arg, REAL_TYPE))
7863 return NULL_TREE;
7865 /* Optimize trunc of constant value. */
7866 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7868 REAL_VALUE_TYPE r, x;
7869 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7871 x = TREE_REAL_CST (arg);
7872 real_trunc (&r, TYPE_MODE (type), &x);
7873 return build_real (type, r);
7876 return fold_trunc_transparent_mathfn (fndecl, arg);
7879 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7880 Return NULL_TREE if no simplification can be made. */
7882 static tree
7883 fold_builtin_floor (tree fndecl, tree arg)
7885 if (!validate_arg (arg, REAL_TYPE))
7886 return NULL_TREE;
7888 /* Optimize floor of constant value. */
7889 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7891 REAL_VALUE_TYPE x;
7893 x = TREE_REAL_CST (arg);
7894 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7896 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7897 REAL_VALUE_TYPE r;
7899 real_floor (&r, TYPE_MODE (type), &x);
7900 return build_real (type, r);
7904 /* Fold floor (x) where x is nonnegative to trunc (x). */
7905 if (tree_expr_nonnegative_p (arg))
7907 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7908 if (truncfn)
7909 return build_call_expr (truncfn, 1, arg);
7912 return fold_trunc_transparent_mathfn (fndecl, arg);
7915 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7916 Return NULL_TREE if no simplification can be made. */
7918 static tree
7919 fold_builtin_ceil (tree fndecl, tree arg)
7921 if (!validate_arg (arg, REAL_TYPE))
7922 return NULL_TREE;
7924 /* Optimize ceil of constant value. */
7925 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7927 REAL_VALUE_TYPE x;
7929 x = TREE_REAL_CST (arg);
7930 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7932 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7933 REAL_VALUE_TYPE r;
7935 real_ceil (&r, TYPE_MODE (type), &x);
7936 return build_real (type, r);
7940 return fold_trunc_transparent_mathfn (fndecl, arg);
7943 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7944 Return NULL_TREE if no simplification can be made. */
7946 static tree
7947 fold_builtin_round (tree fndecl, tree arg)
7949 if (!validate_arg (arg, REAL_TYPE))
7950 return NULL_TREE;
7952 /* Optimize round of constant value. */
7953 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7955 REAL_VALUE_TYPE x;
7957 x = TREE_REAL_CST (arg);
7958 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7960 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7961 REAL_VALUE_TYPE r;
7963 real_round (&r, TYPE_MODE (type), &x);
7964 return build_real (type, r);
7968 return fold_trunc_transparent_mathfn (fndecl, arg);
7971 /* Fold function call to builtin lround, lroundf or lroundl (or the
7972 corresponding long long versions) and other rounding functions. ARG
7973 is the argument to the call. Return NULL_TREE if no simplification
7974 can be made. */
7976 static tree
7977 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7979 if (!validate_arg (arg, REAL_TYPE))
7980 return NULL_TREE;
7982 /* Optimize lround of constant value. */
7983 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7985 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7987 if (real_isfinite (&x))
7989 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7990 tree ftype = TREE_TYPE (arg);
7991 unsigned HOST_WIDE_INT lo2;
7992 HOST_WIDE_INT hi, lo;
7993 REAL_VALUE_TYPE r;
7995 switch (DECL_FUNCTION_CODE (fndecl))
7997 CASE_FLT_FN (BUILT_IN_LFLOOR):
7998 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7999 real_floor (&r, TYPE_MODE (ftype), &x);
8000 break;
8002 CASE_FLT_FN (BUILT_IN_LCEIL):
8003 CASE_FLT_FN (BUILT_IN_LLCEIL):
8004 real_ceil (&r, TYPE_MODE (ftype), &x);
8005 break;
8007 CASE_FLT_FN (BUILT_IN_LROUND):
8008 CASE_FLT_FN (BUILT_IN_LLROUND):
8009 real_round (&r, TYPE_MODE (ftype), &x);
8010 break;
8012 default:
8013 gcc_unreachable ();
8016 REAL_VALUE_TO_INT (&lo, &hi, r);
8017 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8018 return build_int_cst_wide (itype, lo2, hi);
8022 switch (DECL_FUNCTION_CODE (fndecl))
8024 CASE_FLT_FN (BUILT_IN_LFLOOR):
8025 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8026 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8027 if (tree_expr_nonnegative_p (arg))
8028 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8029 arg);
8030 break;
8031 default:;
8034 return fold_fixed_mathfn (fndecl, arg);
8037 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8038    and their long and long long variants (e.g. ffsl and ffsll).  ARG is
8039 the argument to the call. Return NULL_TREE if no simplification can
8040 be made. */
8042 static tree
8043 fold_builtin_bitop (tree fndecl, tree arg)
8045 if (!validate_arg (arg, INTEGER_TYPE))
8046 return NULL_TREE;
8048 /* Optimize for constant argument. */
8049 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8051 HOST_WIDE_INT hi, width, result;
8052 unsigned HOST_WIDE_INT lo;
8053 tree type;
8055 type = TREE_TYPE (arg);
8056 width = TYPE_PRECISION (type);
8057 lo = TREE_INT_CST_LOW (arg);
8059 /* Clear all the bits that are beyond the type's precision. */
8060 if (width > HOST_BITS_PER_WIDE_INT)
8062 hi = TREE_INT_CST_HIGH (arg);
8063 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8064 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8066 else
8068 hi = 0;
8069 if (width < HOST_BITS_PER_WIDE_INT)
8070 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8073 switch (DECL_FUNCTION_CODE (fndecl))
8075 CASE_INT_FN (BUILT_IN_FFS):
8076 if (lo != 0)
8077 result = exact_log2 (lo & -lo) + 1;
8078 else if (hi != 0)
8079 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8080 else
8081 result = 0;
8082 break;
8084 CASE_INT_FN (BUILT_IN_CLZ):
8085 if (hi != 0)
8086 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8087 else if (lo != 0)
8088 result = width - floor_log2 (lo) - 1;
8089 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8090 result = width;
8091 break;
8093 CASE_INT_FN (BUILT_IN_CTZ):
8094 if (lo != 0)
8095 result = exact_log2 (lo & -lo);
8096 else if (hi != 0)
8097 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8098 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8099 result = width;
8100 break;
8102 CASE_INT_FN (BUILT_IN_POPCOUNT):
8103 result = 0;
8104 while (lo)
8105 result++, lo &= lo - 1;
8106 while (hi)
8107 result++, hi &= hi - 1;
8108 break;
8110 CASE_INT_FN (BUILT_IN_PARITY):
8111 result = 0;
8112 while (lo)
8113 result++, lo &= lo - 1;
8114 while (hi)
8115 result++, hi &= hi - 1;
8116 result &= 1;
8117 break;
8119 default:
8120 gcc_unreachable ();
8123 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8126 return NULL_TREE;
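
/* A hypothetical, build-excluded sketch of the constant folding above,
   assuming a 32-bit int.  */
#if 0
static int
bitop_examples (void)
{
  int a = __builtin_popcount (0xf0); /* -> 4.  */
  int b = __builtin_ffs (0x18);      /* -> 4 (lowest set bit, 1-based).  */
  int c = __builtin_ctz (0x18);      /* -> 3.  */
  int d = __builtin_clz (1);         /* -> 31.  */
  int e = __builtin_parity (7);      /* -> 1.  */
  return a + b + c + d + e;
}
#endif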
8129 /* Fold function call to builtin bswap32 or bswap64 with argument ARG.
8130    Return NULL_TREE if no simplification can be made.  */
8131 static tree
8132 fold_builtin_bswap (tree fndecl, tree arg)
8134 if (! validate_arg (arg, INTEGER_TYPE))
8135 return NULL_TREE;
8137 /* Optimize constant value. */
8138 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8140 HOST_WIDE_INT hi, width, r_hi = 0;
8141 unsigned HOST_WIDE_INT lo, r_lo = 0;
8142 tree type;
8144 type = TREE_TYPE (arg);
8145 width = TYPE_PRECISION (type);
8146 lo = TREE_INT_CST_LOW (arg);
8147 hi = TREE_INT_CST_HIGH (arg);
8149 switch (DECL_FUNCTION_CODE (fndecl))
8151 case BUILT_IN_BSWAP32:
8152 case BUILT_IN_BSWAP64:
8154 int s;
8156 for (s = 0; s < width; s += 8)
8158 int d = width - s - 8;
8159 unsigned HOST_WIDE_INT byte;
8161 if (s < HOST_BITS_PER_WIDE_INT)
8162 byte = (lo >> s) & 0xff;
8163 else
8164 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8166 if (d < HOST_BITS_PER_WIDE_INT)
8167 r_lo |= byte << d;
8168 else
8169 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8173 break;
8175 default:
8176 gcc_unreachable ();
8179 if (width < HOST_BITS_PER_WIDE_INT)
8180 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8181 else
8182 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8185 return NULL_TREE;
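
/* An illustrative, build-excluded sketch of the byte-swap folding above.  */
#if 0
static unsigned int
bswap_example (void)
{
  /* Each byte of the constant is moved to the mirrored position.  */
  return __builtin_bswap32 (0x12345678); /* -> 0x78563412.  */
}
#endif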
8188 /* Return true if EXPR is the real constant contained in VALUE. */
8190 static bool
8191 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8193 STRIP_NOPS (expr);
8195 return ((TREE_CODE (expr) == REAL_CST
8196 && !TREE_OVERFLOW (expr)
8197 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8198 || (TREE_CODE (expr) == COMPLEX_CST
8199 && real_dconstp (TREE_REALPART (expr), value)
8200 && real_zerop (TREE_IMAGPART (expr))));
8203 /* A subroutine of fold_builtin to fold the various logarithmic
8204    functions.  Return NULL_TREE if no simplification can be made.
8205 FUNC is the corresponding MPFR logarithm function. */
8207 static tree
8208 fold_builtin_logarithm (tree fndecl, tree arg,
8209 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8211 if (validate_arg (arg, REAL_TYPE))
8213 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8214 tree res;
8215 const enum built_in_function fcode = builtin_mathfn_code (arg);
8217       /* Optimize log(e) = 1.0.  We're never passed an exact 'e';
8218 	 instead we'll look for 'e' truncated to MODE.  So only do
8219 this if flag_unsafe_math_optimizations is set. */
8220 if (flag_unsafe_math_optimizations && func == mpfr_log)
8222 const REAL_VALUE_TYPE e_truncated =
8223 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
8224 if (real_dconstp (arg, &e_truncated))
8225 return build_real (type, dconst1);
8228 /* Calculate the result when the argument is a constant. */
8229 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8230 return res;
8232 /* Special case, optimize logN(expN(x)) = x. */
8233 if (flag_unsafe_math_optimizations
8234 && ((func == mpfr_log
8235 && (fcode == BUILT_IN_EXP
8236 || fcode == BUILT_IN_EXPF
8237 || fcode == BUILT_IN_EXPL))
8238 || (func == mpfr_log2
8239 && (fcode == BUILT_IN_EXP2
8240 || fcode == BUILT_IN_EXP2F
8241 || fcode == BUILT_IN_EXP2L))
8242 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8243 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8245 /* Optimize logN(func()) for various exponential functions. We
8246 want to determine the value "x" and the power "exponent" in
8247 order to transform logN(x**exponent) into exponent*logN(x). */
8248 if (flag_unsafe_math_optimizations)
8250 tree exponent = 0, x = 0;
8252 switch (fcode)
8254 CASE_FLT_FN (BUILT_IN_EXP):
8255 	    /* Prepare to do logN(exp(exponent)) -> exponent*logN(e).  */
8256 x = build_real (type,
8257 real_value_truncate (TYPE_MODE (type),
8258 *get_real_const (rv_e)));
8259 exponent = CALL_EXPR_ARG (arg, 0);
8260 break;
8261 CASE_FLT_FN (BUILT_IN_EXP2):
8262 	    /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2).  */
8263 x = build_real (type, dconst2);
8264 exponent = CALL_EXPR_ARG (arg, 0);
8265 break;
8266 CASE_FLT_FN (BUILT_IN_EXP10):
8267 CASE_FLT_FN (BUILT_IN_POW10):
8268 	    /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10).  */
8270 REAL_VALUE_TYPE dconst10;
8271 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8272 x = build_real (type, dconst10);
8274 exponent = CALL_EXPR_ARG (arg, 0);
8275 break;
8276 CASE_FLT_FN (BUILT_IN_SQRT):
8277 	    /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x).  */
8278 x = CALL_EXPR_ARG (arg, 0);
8279 exponent = build_real (type, dconsthalf);
8280 break;
8281 CASE_FLT_FN (BUILT_IN_CBRT):
8282 	    /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x).  */
8283 x = CALL_EXPR_ARG (arg, 0);
8284 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8285 *get_real_const (rv_third)));
8286 break;
8287 CASE_FLT_FN (BUILT_IN_POW):
8288 	    /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x).  */
8289 x = CALL_EXPR_ARG (arg, 0);
8290 exponent = CALL_EXPR_ARG (arg, 1);
8291 break;
8292 default:
8293 break;
8296 /* Now perform the optimization. */
8297 if (x && exponent)
8299 tree logfn = build_call_expr (fndecl, 1, x);
8300 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8305 return NULL_TREE;
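
/* A hypothetical, build-excluded sketch of the logarithm foldings above;
   they all require -funsafe-math-optimizations.  */
#if 0
static double
log_examples (double x, double y)
{
  double a = __builtin_log (__builtin_exp (x));    /* -> x.  */
  double b = __builtin_log (__builtin_sqrt (x));   /* -> 0.5 * log (x).  */
  double c = __builtin_log (__builtin_pow (x, y)); /* -> y * log (x).  */
  return a + b + c;
}
#endif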
8308 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8309 NULL_TREE if no simplification can be made. */
8311 static tree
8312 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8314 tree res, narg0, narg1;
8316 if (!validate_arg (arg0, REAL_TYPE)
8317 || !validate_arg (arg1, REAL_TYPE))
8318 return NULL_TREE;
8320 /* Calculate the result when the argument is a constant. */
8321 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8322 return res;
8324 /* If either argument to hypot has a negate or abs, strip that off.
8325 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8326 narg0 = fold_strip_sign_ops (arg0);
8327 narg1 = fold_strip_sign_ops (arg1);
8328 if (narg0 || narg1)
8330 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8331 narg1 ? narg1 : arg1);
8334 /* If either argument is zero, hypot is fabs of the other. */
8335 if (real_zerop (arg0))
8336 return fold_build1 (ABS_EXPR, type, arg1);
8337 else if (real_zerop (arg1))
8338 return fold_build1 (ABS_EXPR, type, arg0);
8340 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8341 if (flag_unsafe_math_optimizations
8342 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8344 const REAL_VALUE_TYPE sqrt2_trunc
8345 = real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
8346 return fold_build2 (MULT_EXPR, type,
8347 fold_build1 (ABS_EXPR, type, arg0),
8348 build_real (type, sqrt2_trunc));
8351 return NULL_TREE;
8355 /* Fold a builtin function call to pow, powf, or powl. Return
8356 NULL_TREE if no simplification can be made. */
8357 static tree
8358 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8360 tree res;
8362 if (!validate_arg (arg0, REAL_TYPE)
8363 || !validate_arg (arg1, REAL_TYPE))
8364 return NULL_TREE;
8366 /* Calculate the result when the argument is a constant. */
8367 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8368 return res;
8370 /* Optimize pow(1.0,y) = 1.0. */
8371 if (real_onep (arg0))
8372 return omit_one_operand (type, build_real (type, dconst1), arg1);
8374 if (TREE_CODE (arg1) == REAL_CST
8375 && !TREE_OVERFLOW (arg1))
8377 REAL_VALUE_TYPE cint;
8378 REAL_VALUE_TYPE c;
8379 HOST_WIDE_INT n;
8381 c = TREE_REAL_CST (arg1);
8383 /* Optimize pow(x,0.0) = 1.0. */
8384 if (REAL_VALUES_EQUAL (c, dconst0))
8385 return omit_one_operand (type, build_real (type, dconst1),
8386 arg0);
8388 /* Optimize pow(x,1.0) = x. */
8389 if (REAL_VALUES_EQUAL (c, dconst1))
8390 return arg0;
8392 /* Optimize pow(x,-1.0) = 1.0/x. */
8393 if (REAL_VALUES_EQUAL (c, dconstm1))
8394 return fold_build2 (RDIV_EXPR, type,
8395 build_real (type, dconst1), arg0);
8397 /* Optimize pow(x,0.5) = sqrt(x). */
8398 if (flag_unsafe_math_optimizations
8399 && REAL_VALUES_EQUAL (c, dconsthalf))
8401 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8403 if (sqrtfn != NULL_TREE)
8404 return build_call_expr (sqrtfn, 1, arg0);
8407 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8408 if (flag_unsafe_math_optimizations)
8410 const REAL_VALUE_TYPE dconstroot
8411 = real_value_truncate (TYPE_MODE (type),
8412 *get_real_const (rv_third));
8414 if (REAL_VALUES_EQUAL (c, dconstroot))
8416 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8417 if (cbrtfn != NULL_TREE)
8418 return build_call_expr (cbrtfn, 1, arg0);
8422 /* Check for an integer exponent. */
8423 n = real_to_integer (&c);
8424 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8425 if (real_identical (&c, &cint))
8427 /* Attempt to evaluate pow at compile-time. */
8428 if (TREE_CODE (arg0) == REAL_CST
8429 && !TREE_OVERFLOW (arg0))
8431 REAL_VALUE_TYPE x;
8432 bool inexact;
8434 x = TREE_REAL_CST (arg0);
8435 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8436 if (flag_unsafe_math_optimizations || !inexact)
8437 return build_real (type, x);
8440 /* Strip sign ops from even integer powers. */
8441 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8443 tree narg0 = fold_strip_sign_ops (arg0);
8444 if (narg0)
8445 return build_call_expr (fndecl, 2, narg0, arg1);
8450 if (flag_unsafe_math_optimizations)
8452 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8454 /* Optimize pow(expN(x),y) = expN(x*y). */
8455 if (BUILTIN_EXPONENT_P (fcode))
8457 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8458 tree arg = CALL_EXPR_ARG (arg0, 0);
8459 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8460 return build_call_expr (expfn, 1, arg);
8463 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8464 if (BUILTIN_SQRT_P (fcode))
8466 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8467 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8468 build_real (type, dconsthalf));
8469 return build_call_expr (fndecl, 2, narg0, narg1);
8472 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8473 if (BUILTIN_CBRT_P (fcode))
8475 tree arg = CALL_EXPR_ARG (arg0, 0);
8476 if (tree_expr_nonnegative_p (arg))
8478 const REAL_VALUE_TYPE dconstroot
8479 = real_value_truncate (TYPE_MODE (type),
8480 *get_real_const (rv_third));
8481 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8482 build_real (type, dconstroot));
8483 return build_call_expr (fndecl, 2, arg, narg1);
8487 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8488 if (fcode == BUILT_IN_POW
8489 || fcode == BUILT_IN_POWF
8490 || fcode == BUILT_IN_POWL)
8492 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8493 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8494 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8495 return build_call_expr (fndecl, 2, arg00, narg1);
8499 return NULL_TREE;
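
/* An illustrative, build-excluded sketch of the pow foldings above; the
   fractional-power and nested-call rewrites need
   -funsafe-math-optimizations.  */
#if 0
static double
pow_examples (double x, double y, double z)
{
  double a = __builtin_pow (x, 1.0);                  /* -> x.  */
  double b = __builtin_pow (x, -1.0);                 /* -> 1.0 / x.  */
  double c = __builtin_pow (x, 0.5);                  /* -> sqrt (x).  */
  double d = __builtin_pow (__builtin_pow (x, y), z); /* -> pow (x, y * z).  */
  return a + b + c + d;
}
#endif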
8502 /* Fold a builtin function call to powi, powif, or powil with arguments
8503    ARG0 and ARG1.  Return NULL_TREE if no simplification can be made.  */
8504 static tree
8505 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8506 tree arg0, tree arg1, tree type)
8508 if (!validate_arg (arg0, REAL_TYPE)
8509 || !validate_arg (arg1, INTEGER_TYPE))
8510 return NULL_TREE;
8512 /* Optimize pow(1.0,y) = 1.0. */
8513 if (real_onep (arg0))
8514 return omit_one_operand (type, build_real (type, dconst1), arg1);
8516 if (host_integerp (arg1, 0))
8518 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8520 /* Evaluate powi at compile-time. */
8521 if (TREE_CODE (arg0) == REAL_CST
8522 && !TREE_OVERFLOW (arg0))
8524 REAL_VALUE_TYPE x;
8525 x = TREE_REAL_CST (arg0);
8526 real_powi (&x, TYPE_MODE (type), &x, c);
8527 return build_real (type, x);
8530 /* Optimize pow(x,0) = 1.0. */
8531 if (c == 0)
8532 return omit_one_operand (type, build_real (type, dconst1),
8533 arg0);
8535 /* Optimize pow(x,1) = x. */
8536 if (c == 1)
8537 return arg0;
8539 /* Optimize pow(x,-1) = 1.0/x. */
8540 if (c == -1)
8541 return fold_build2 (RDIV_EXPR, type,
8542 build_real (type, dconst1), arg0);
8545 return NULL_TREE;
8548 /* A subroutine of fold_builtin to fold the various exponent
8549 functions. Return NULL_TREE if no simplification can be made.
8550 FUNC is the corresponding MPFR exponent function. */
8552 static tree
8553 fold_builtin_exponent (tree fndecl, tree arg,
8554 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8556 if (validate_arg (arg, REAL_TYPE))
8558 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8559 tree res;
8561 /* Calculate the result when the argument is a constant. */
8562 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8563 return res;
8565 /* Optimize expN(logN(x)) = x. */
8566 if (flag_unsafe_math_optimizations)
8568 const enum built_in_function fcode = builtin_mathfn_code (arg);
8570 if ((func == mpfr_exp
8571 && (fcode == BUILT_IN_LOG
8572 || fcode == BUILT_IN_LOGF
8573 || fcode == BUILT_IN_LOGL))
8574 || (func == mpfr_exp2
8575 && (fcode == BUILT_IN_LOG2
8576 || fcode == BUILT_IN_LOG2F
8577 || fcode == BUILT_IN_LOG2L))
8578 || (func == mpfr_exp10
8579 && (fcode == BUILT_IN_LOG10
8580 || fcode == BUILT_IN_LOG10F
8581 || fcode == BUILT_IN_LOG10L)))
8582 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8586 return NULL_TREE;
8589 /* Return true if VAR is a VAR_DECL or a component thereof. */
8591 static bool
8592 var_decl_component_p (tree var)
8594 tree inner = var;
8595 while (handled_component_p (inner))
8596 inner = TREE_OPERAND (inner, 0);
8597 return SSA_VAR_P (inner);
8600 /* Fold function call to builtin memset. Return
8601 NULL_TREE if no simplification can be made. */
8603 static tree
8604 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8606 tree var, ret;
8607 unsigned HOST_WIDE_INT length, cval;
8609 if (! validate_arg (dest, POINTER_TYPE)
8610 || ! validate_arg (c, INTEGER_TYPE)
8611 || ! validate_arg (len, INTEGER_TYPE))
8612 return NULL_TREE;
8614 if (! host_integerp (len, 1))
8615 return NULL_TREE;
8617 /* If the LEN parameter is zero, return DEST. */
8618 if (integer_zerop (len))
8619 return omit_one_operand (type, dest, c);
8621 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8622 return NULL_TREE;
8624 var = dest;
8625 STRIP_NOPS (var);
8626 if (TREE_CODE (var) != ADDR_EXPR)
8627 return NULL_TREE;
8629 var = TREE_OPERAND (var, 0);
8630 if (TREE_THIS_VOLATILE (var))
8631 return NULL_TREE;
8633 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8634 && !POINTER_TYPE_P (TREE_TYPE (var)))
8635 return NULL_TREE;
8637 if (! var_decl_component_p (var))
8638 return NULL_TREE;
8640 length = tree_low_cst (len, 1);
8641 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8642 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8643 < (int) length)
8644 return NULL_TREE;
8646 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8647 return NULL_TREE;
8649 if (integer_zerop (c))
8650 cval = 0;
8651 else
8653 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8654 return NULL_TREE;
8656 cval = tree_low_cst (c, 1);
8657 cval &= 0xff;
8658 cval |= cval << 8;
8659 cval |= cval << 16;
8660       cval |= (cval << 31) << 1; /* cval << 32 without an undefined shift.  */
8663 ret = build_int_cst_type (TREE_TYPE (var), cval);
8664 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8665 if (ignore)
8666 return ret;
8668 return omit_one_operand (type, dest, ret);
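
/* A hypothetical, build-excluded sketch of the memset folding above.  */
#if 0
static void
memset_example (void)
{
  int i;
  /* For a suitably aligned 4-byte variable this call is folded into the
     single store  i = 0xabababab;  (the byte value replicated).  */
  __builtin_memset (&i, 0xab, sizeof i);
}
#endif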
8671 /* Fold function call to builtin bzero.  Return
8672 NULL_TREE if no simplification can be made. */
8674 static tree
8675 fold_builtin_bzero (tree dest, tree size, bool ignore)
8677 if (! validate_arg (dest, POINTER_TYPE)
8678 || ! validate_arg (size, INTEGER_TYPE))
8679 return NULL_TREE;
8681 if (!ignore)
8682 return NULL_TREE;
8684 /* New argument list transforming bzero(ptr x, int y) to
8685 memset(ptr x, int 0, size_t y). This is done this way
8686      so that if it isn't expanded inline, we fall back to
8687 calling bzero instead of memset. */
8689 return fold_builtin_memset (dest, integer_zero_node,
8690 fold_convert (sizetype, size),
8691 void_type_node, ignore);
8694 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8695 NULL_TREE if no simplification can be made.
8696 If ENDP is 0, return DEST (like memcpy).
8697 If ENDP is 1, return DEST+LEN (like mempcpy).
8698 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8699 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8700 (memmove). */
8702 static tree
8703 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8705 tree destvar, srcvar, expr;
8707 if (! validate_arg (dest, POINTER_TYPE)
8708 || ! validate_arg (src, POINTER_TYPE)
8709 || ! validate_arg (len, INTEGER_TYPE))
8710 return NULL_TREE;
8712 /* If the LEN parameter is zero, return DEST. */
8713 if (integer_zerop (len))
8714 return omit_one_operand (type, dest, src);
8716 /* If SRC and DEST are the same (and not volatile), return
8717 DEST{,+LEN,+LEN-1}. */
8718 if (operand_equal_p (src, dest, 0))
8719 expr = len;
8720 else
8722 tree srctype, desttype;
8723 if (endp == 3)
8725 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8726 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8728 /* Both DEST and SRC must be pointer types.
8729 ??? This is what old code did. Is the testing for pointer types
8730 really mandatory?
8732 If either SRC is readonly or length is 1, we can use memcpy. */
8733 if (dest_align && src_align
8734 && (readonly_data_expr (src)
8735 || (host_integerp (len, 1)
8736 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8737 tree_low_cst (len, 1)))))
8739 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8740 if (!fn)
8741 return NULL_TREE;
8742 return build_call_expr (fn, 3, dest, src, len);
8744 return NULL_TREE;
8747 if (!host_integerp (len, 0))
8748 return NULL_TREE;
8749 /* FIXME:
8750 	 This logic loses for arguments like (type *)malloc (sizeof (type)),
8751 	 since we strip the casts up to the VOID return value from malloc.
8752 Perhaps we ought to inherit type from non-VOID argument here? */
8753 STRIP_NOPS (src);
8754 STRIP_NOPS (dest);
8755 srctype = TREE_TYPE (TREE_TYPE (src));
8756 desttype = TREE_TYPE (TREE_TYPE (dest));
8757 if (!srctype || !desttype
8758 || !TYPE_SIZE_UNIT (srctype)
8759 || !TYPE_SIZE_UNIT (desttype)
8760 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8761 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8762 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8763 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8764 return NULL_TREE;
8766 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8767 < (int) TYPE_ALIGN (desttype)
8768 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8769 < (int) TYPE_ALIGN (srctype)))
8770 return NULL_TREE;
8772 if (!ignore)
8773 dest = builtin_save_expr (dest);
8775 srcvar = build_fold_indirect_ref (src);
8776 if (TREE_THIS_VOLATILE (srcvar))
8777 return NULL_TREE;
8778 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8779 return NULL_TREE;
8780 /* With memcpy, it is possible to bypass aliasing rules, so without
8781 	 this check, e.g. execute/20060930-2.c would be misoptimized, because
8782 	 it uses a conflicting alias set to hold the argument for the memcpy call.
8783 This check is probably unnecessary with -fno-strict-aliasing.
8784 Similarly for destvar. See also PR29286. */
8785 if (!var_decl_component_p (srcvar)
8786 	  /* Accept:  memcpy (*char_var, "test", 1); that simplifies
8787 to char_var='t'; */
8788 || is_gimple_min_invariant (srcvar)
8789 || readonly_data_expr (src))
8790 return NULL_TREE;
8792 destvar = build_fold_indirect_ref (dest);
8793 if (TREE_THIS_VOLATILE (destvar))
8794 return NULL_TREE;
8795 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8796 return NULL_TREE;
8797 if (!var_decl_component_p (destvar))
8798 return NULL_TREE;
8800 if (srctype == desttype
8801 || (gimple_in_ssa_p (cfun)
8802 && useless_type_conversion_p (desttype, srctype)))
8803 expr = srcvar;
8804 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8805 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8806 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8807 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8808 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8809 else
8810 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8811 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8814 if (ignore)
8815 return expr;
8817 if (endp == 0 || endp == 3)
8818 return omit_one_operand (type, dest, expr);
8820 if (expr == len)
8821 expr = NULL_TREE;
8823 if (endp == 2)
8824 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8825 ssize_int (1));
8827 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8828 dest = fold_convert (type, dest);
8829 if (expr)
8830 dest = omit_one_operand (type, dest, expr);
8831 return dest;
8834 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8835 If LEN is not NULL, it represents the length of the string to be
8836 copied. Return NULL_TREE if no simplification can be made. */
8838 tree
8839 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8841 tree fn;
8843 if (!validate_arg (dest, POINTER_TYPE)
8844 || !validate_arg (src, POINTER_TYPE))
8845 return NULL_TREE;
8847 /* If SRC and DEST are the same (and not volatile), return DEST. */
8848 if (operand_equal_p (src, dest, 0))
8849 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8851 if (optimize_size)
8852 return NULL_TREE;
8854 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8855 if (!fn)
8856 return NULL_TREE;
8858 if (!len)
8860 len = c_strlen (src, 1);
8861 if (! len || TREE_SIDE_EFFECTS (len))
8862 return NULL_TREE;
8865 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8866 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8867 build_call_expr (fn, 3, dest, src, len));
8870 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8871 If SLEN is not NULL, it represents the length of the source string.
8872 Return NULL_TREE if no simplification can be made. */
8874 tree
8875 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8877 tree fn;
8879 if (!validate_arg (dest, POINTER_TYPE)
8880 || !validate_arg (src, POINTER_TYPE)
8881 || !validate_arg (len, INTEGER_TYPE))
8882 return NULL_TREE;
8884 /* If the LEN parameter is zero, return DEST. */
8885 if (integer_zerop (len))
8886 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8888 /* We can't compare slen with len as constants below if len is not a
8889 constant. */
8890 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8891 return NULL_TREE;
8893 if (!slen)
8894 slen = c_strlen (src, 1);
8896 /* Now, we must be passed a constant src ptr parameter. */
8897 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8898 return NULL_TREE;
8900 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8902 /* We do not support simplification of this case, though we do
8903 support it when expanding trees into RTL. */
8904 /* FIXME: generate a call to __builtin_memset. */
8905 if (tree_int_cst_lt (slen, len))
8906 return NULL_TREE;
8908   /* OK, transform into builtin memcpy.  */
8909 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8910 if (!fn)
8911 return NULL_TREE;
8912 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8913 build_call_expr (fn, 3, dest, src, len));
8916 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8917 arguments to the call, and TYPE is its return type.
8918 Return NULL_TREE if no simplification can be made. */
8920 static tree
8921 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8923 if (!validate_arg (arg1, POINTER_TYPE)
8924 || !validate_arg (arg2, INTEGER_TYPE)
8925 || !validate_arg (len, INTEGER_TYPE))
8926 return NULL_TREE;
8927 else
8929 const char *p1;
8931 if (TREE_CODE (arg2) != INTEGER_CST
8932 || !host_integerp (len, 1))
8933 return NULL_TREE;
8935 p1 = c_getstr (arg1);
8936 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8938 char c;
8939 const char *r;
8940 tree tem;
8942 if (target_char_cast (arg2, &c))
8943 return NULL_TREE;
8945 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8947 if (r == NULL)
8948 return build_int_cst (TREE_TYPE (arg1), 0);
8950 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8951 size_int (r - p1));
8952 return fold_convert (type, tem);
8954 return NULL_TREE;
8958 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8959 Return NULL_TREE if no simplification can be made. */
8961 static tree
8962 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8964 const char *p1, *p2;
8966 if (!validate_arg (arg1, POINTER_TYPE)
8967 || !validate_arg (arg2, POINTER_TYPE)
8968 || !validate_arg (len, INTEGER_TYPE))
8969 return NULL_TREE;
8971 /* If the LEN parameter is zero, return zero. */
8972 if (integer_zerop (len))
8973 return omit_two_operands (integer_type_node, integer_zero_node,
8974 arg1, arg2);
8976 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8977 if (operand_equal_p (arg1, arg2, 0))
8978 return omit_one_operand (integer_type_node, integer_zero_node, len);
8980 p1 = c_getstr (arg1);
8981 p2 = c_getstr (arg2);
8983 /* If all arguments are constant, and the value of len is not greater
8984 than the lengths of arg1 and arg2, evaluate at compile-time. */
8985 if (host_integerp (len, 1) && p1 && p2
8986 && compare_tree_int (len, strlen (p1) + 1) <= 0
8987 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8989 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8991 if (r > 0)
8992 return integer_one_node;
8993 else if (r < 0)
8994 return integer_minus_one_node;
8995 else
8996 return integer_zero_node;
8999   /* If the len parameter is one, return an expression corresponding to
9000      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
9001 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9003 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9004 tree cst_uchar_ptr_node
9005 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9007 tree ind1 = fold_convert (integer_type_node,
9008 build1 (INDIRECT_REF, cst_uchar_node,
9009 fold_convert (cst_uchar_ptr_node,
9010 arg1)));
9011 tree ind2 = fold_convert (integer_type_node,
9012 build1 (INDIRECT_REF, cst_uchar_node,
9013 fold_convert (cst_uchar_ptr_node,
9014 arg2)));
9015 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9018 return NULL_TREE;
9021 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9022 Return NULL_TREE if no simplification can be made. */
9024 static tree
9025 fold_builtin_strcmp (tree arg1, tree arg2)
9027 const char *p1, *p2;
9029 if (!validate_arg (arg1, POINTER_TYPE)
9030 || !validate_arg (arg2, POINTER_TYPE))
9031 return NULL_TREE;
9033 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9034 if (operand_equal_p (arg1, arg2, 0))
9035 return integer_zero_node;
9037 p1 = c_getstr (arg1);
9038 p2 = c_getstr (arg2);
9040 if (p1 && p2)
9042 const int i = strcmp (p1, p2);
9043 if (i < 0)
9044 return integer_minus_one_node;
9045 else if (i > 0)
9046 return integer_one_node;
9047 else
9048 return integer_zero_node;
9051 /* If the second arg is "", return *(const unsigned char*)arg1. */
9052 if (p2 && *p2 == '\0')
9054 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9055 tree cst_uchar_ptr_node
9056 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9058 return fold_convert (integer_type_node,
9059 build1 (INDIRECT_REF, cst_uchar_node,
9060 fold_convert (cst_uchar_ptr_node,
9061 arg1)));
9064 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9065 if (p1 && *p1 == '\0')
9067 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9068 tree cst_uchar_ptr_node
9069 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9071 tree temp = fold_convert (integer_type_node,
9072 build1 (INDIRECT_REF, cst_uchar_node,
9073 fold_convert (cst_uchar_ptr_node,
9074 arg2)));
9075 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9078 return NULL_TREE;
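
/* A hypothetical, build-excluded sketch of the strcmp foldings above.  */
#if 0
static int
strcmp_examples (const char *s)
{
  int a = __builtin_strcmp ("abc", "abd"); /* -> -1, computed at compile time.  */
  int b = __builtin_strcmp (s, "");        /* -> *(const unsigned char *) s.  */
  int c = __builtin_strcmp ("", s);        /* -> -*(const unsigned char *) s.  */
  return a + b + c;
}
#endif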
9081 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9082 Return NULL_TREE if no simplification can be made. */
9084 static tree
9085 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9087 const char *p1, *p2;
9089 if (!validate_arg (arg1, POINTER_TYPE)
9090 || !validate_arg (arg2, POINTER_TYPE)
9091 || !validate_arg (len, INTEGER_TYPE))
9092 return NULL_TREE;
9094 /* If the LEN parameter is zero, return zero. */
9095 if (integer_zerop (len))
9096 return omit_two_operands (integer_type_node, integer_zero_node,
9097 arg1, arg2);
9099 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9100 if (operand_equal_p (arg1, arg2, 0))
9101 return omit_one_operand (integer_type_node, integer_zero_node, len);
9103 p1 = c_getstr (arg1);
9104 p2 = c_getstr (arg2);
9106 if (host_integerp (len, 1) && p1 && p2)
9108 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9109 if (i > 0)
9110 return integer_one_node;
9111 else if (i < 0)
9112 return integer_minus_one_node;
9113 else
9114 return integer_zero_node;
9117 /* If the second arg is "", and the length is greater than zero,
9118 return *(const unsigned char*)arg1. */
9119 if (p2 && *p2 == '\0'
9120 && TREE_CODE (len) == INTEGER_CST
9121 && tree_int_cst_sgn (len) == 1)
9123 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9124 tree cst_uchar_ptr_node
9125 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9127 return fold_convert (integer_type_node,
9128 build1 (INDIRECT_REF, cst_uchar_node,
9129 fold_convert (cst_uchar_ptr_node,
9130 arg1)));
9133 /* If the first arg is "", and the length is greater than zero,
9134 return -*(const unsigned char*)arg2. */
9135 if (p1 && *p1 == '\0'
9136 && TREE_CODE (len) == INTEGER_CST
9137 && tree_int_cst_sgn (len) == 1)
9139 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9140 tree cst_uchar_ptr_node
9141 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9143 tree temp = fold_convert (integer_type_node,
9144 build1 (INDIRECT_REF, cst_uchar_node,
9145 fold_convert (cst_uchar_ptr_node,
9146 arg2)));
9147 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9150 /* If len parameter is one, return an expression corresponding to
9151 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9152 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9154 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9155 tree cst_uchar_ptr_node
9156 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9158 tree ind1 = fold_convert (integer_type_node,
9159 build1 (INDIRECT_REF, cst_uchar_node,
9160 fold_convert (cst_uchar_ptr_node,
9161 arg1)));
9162 tree ind2 = fold_convert (integer_type_node,
9163 build1 (INDIRECT_REF, cst_uchar_node,
9164 fold_convert (cst_uchar_ptr_node,
9165 arg2)));
9166 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9169 return NULL_TREE;
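/* Editor's note: a hedged sketch (under #if 0, not compiled) of the
   length-one strncmp case folded above; the helper is hypothetical.  */
#if 0
static int
example_strncmp_len1 (const char *a, const char *b)
{
  /* strncmp (a, b, 1) compares exactly one byte, so it reduces to the
     difference of the first characters read as unsigned char.  */
  return *(const unsigned char *) a - *(const unsigned char *) b;
}
#endif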
9172 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9173 ARG. Return NULL_TREE if no simplification can be made. */
9175 static tree
9176 fold_builtin_signbit (tree arg, tree type)
9178 tree temp;
9180 if (!validate_arg (arg, REAL_TYPE))
9181 return NULL_TREE;
9183 /* If ARG is a compile-time constant, determine the result. */
9184 if (TREE_CODE (arg) == REAL_CST
9185 && !TREE_OVERFLOW (arg))
9187 REAL_VALUE_TYPE c;
9189 c = TREE_REAL_CST (arg);
9190 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9191 return fold_convert (type, temp);
9194 /* If ARG is non-negative, the result is always zero. */
9195 if (tree_expr_nonnegative_p (arg))
9196 return omit_one_operand (type, integer_zero_node, arg);
9198 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9199 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9200 return fold_build2 (LT_EXPR, type, arg,
9201 build_real (TREE_TYPE (arg), dconst0));
9203 return NULL_TREE;
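/* Editor's note: illustrative only (under #if 0).  The last fold above
   rewrites signbit(x) as x < 0.0, which is valid only when the format
   has no signed zeros: signbit (-0.0) is nonzero while -0.0 < 0.0 is
   false.  The sketch shows exactly that corner case.  */
#if 0
#include <math.h>

static void
example_signbit_corner (void)
{
  double nz = -0.0;
  /* With IEEE signed zeros these differ, hence the !HONOR_SIGNED_ZEROS
     guard in the folder above.  */
  int a = signbit (nz) != 0;   /* 1 */
  int b = nz < 0.0;            /* 0 */
  (void) a; (void) b;
}
#endif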
9206 /* Fold function call to builtin copysign, copysignf or copysignl with
9207 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9208 be made. */
9210 static tree
9211 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9213 tree tem;
9215 if (!validate_arg (arg1, REAL_TYPE)
9216 || !validate_arg (arg2, REAL_TYPE))
9217 return NULL_TREE;
9219 /* copysign(X,X) is X. */
9220 if (operand_equal_p (arg1, arg2, 0))
9221 return fold_convert (type, arg1);
9223 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9224 if (TREE_CODE (arg1) == REAL_CST
9225 && TREE_CODE (arg2) == REAL_CST
9226 && !TREE_OVERFLOW (arg1)
9227 && !TREE_OVERFLOW (arg2))
9229 REAL_VALUE_TYPE c1, c2;
9231 c1 = TREE_REAL_CST (arg1);
9232 c2 = TREE_REAL_CST (arg2);
9233 /* c1.sign := c2.sign. */
9234 real_copysign (&c1, &c2);
9235 return build_real (type, c1);
9238 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9239 Remember to evaluate Y for side-effects. */
9240 if (tree_expr_nonnegative_p (arg2))
9241 return omit_one_operand (type,
9242 fold_build1 (ABS_EXPR, type, arg1),
9243 arg2);
9245 /* Strip sign changing operations for the first argument. */
9246 tem = fold_strip_sign_ops (arg1);
9247 if (tem)
9248 return build_call_expr (fndecl, 2, tem, arg2);
9250 return NULL_TREE;
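/* Editor's note: a small sketch (under #if 0, not compiled) of the
   copysign identities used above; the helper name is hypothetical.  */
#if 0
#include <math.h>

static void
example_copysign_folds (double x, double y)
{
  /* copysign (x, x) is x itself.  */
  double a = copysign (x, x);
  /* When the second operand is known non-negative, only |x| remains;
     the folder still keeps the operand around for its side effects,
     hence omit_one_operand above.  */
  double b = copysign (x, fabs (y));   /* can reduce to fabs (x) */
  (void) a; (void) b;
}
#endif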
9253 /* Fold a call to builtin isascii with argument ARG. */
9255 static tree
9256 fold_builtin_isascii (tree arg)
9258 if (!validate_arg (arg, INTEGER_TYPE))
9259 return NULL_TREE;
9260 else
9262 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9263 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9264 build_int_cst (NULL_TREE,
9265 ~ (unsigned HOST_WIDE_INT) 0x7f));
9266 return fold_build2 (EQ_EXPR, integer_type_node,
9267 arg, integer_zero_node);
9271 /* Fold a call to builtin toascii with argument ARG. */
9273 static tree
9274 fold_builtin_toascii (tree arg)
9276 if (!validate_arg (arg, INTEGER_TYPE))
9277 return NULL_TREE;
9279 /* Transform toascii(c) -> (c & 0x7f). */
9280 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9281 build_int_cst (NULL_TREE, 0x7f));
9284 /* Fold a call to builtin isdigit with argument ARG. */
9286 static tree
9287 fold_builtin_isdigit (tree arg)
9289 if (!validate_arg (arg, INTEGER_TYPE))
9290 return NULL_TREE;
9291 else
9293 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9294 /* According to the C standard, isdigit is unaffected by locale.
9295 However, it definitely is affected by the target character set. */
9296 unsigned HOST_WIDE_INT target_digit0
9297 = lang_hooks.to_target_charset ('0');
9299 if (target_digit0 == 0)
9300 return NULL_TREE;
9302 arg = fold_convert (unsigned_type_node, arg);
9303 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9304 build_int_cst (unsigned_type_node, target_digit0));
9305 return fold_build2 (LE_EXPR, integer_type_node, arg,
9306 build_int_cst (unsigned_type_node, 9));
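/* Editor's note: illustrative sketch (under #if 0) of the integer
   rewrites performed by the three ctype folders above, written out at
   the C level for an ASCII-like target; the helpers are hypothetical.  */
#if 0
static int example_isascii (int c) { return (c & ~0x7f) == 0; }
static int example_toascii (int c) { return c & 0x7f; }
static int example_isdigit (int c)
{
  /* One unsigned subtraction and compare replaces the library call;
     '0' stands for the target character set's digit zero.  */
  return (unsigned) c - '0' <= 9;
}
#endif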
9310 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9312 static tree
9313 fold_builtin_fabs (tree arg, tree type)
9315 if (!validate_arg (arg, REAL_TYPE))
9316 return NULL_TREE;
9318 arg = fold_convert (type, arg);
9319 if (TREE_CODE (arg) == REAL_CST)
9320 return fold_abs_const (arg, type);
9321 return fold_build1 (ABS_EXPR, type, arg);
9324 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9326 static tree
9327 fold_builtin_abs (tree arg, tree type)
9329 if (!validate_arg (arg, INTEGER_TYPE))
9330 return NULL_TREE;
9332 arg = fold_convert (type, arg);
9333 if (TREE_CODE (arg) == INTEGER_CST)
9334 return fold_abs_const (arg, type);
9335 return fold_build1 (ABS_EXPR, type, arg);
9338 /* Fold a call to builtin fmin or fmax. */
9340 static tree
9341 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9343 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9345 /* Calculate the result when the argument is a constant. */
9346 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9348 if (res)
9349 return res;
9351 /* If either argument is NaN, return the other one. Avoid the
9352 transformation if we get (and honor) a signalling NaN. Using
9353 omit_one_operand() ensures we create a non-lvalue. */
9354 if (TREE_CODE (arg0) == REAL_CST
9355 && real_isnan (&TREE_REAL_CST (arg0))
9356 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9357 || ! TREE_REAL_CST (arg0).signalling))
9358 return omit_one_operand (type, arg1, arg0);
9359 if (TREE_CODE (arg1) == REAL_CST
9360 && real_isnan (&TREE_REAL_CST (arg1))
9361 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9362 || ! TREE_REAL_CST (arg1).signalling))
9363 return omit_one_operand (type, arg0, arg1);
9365 /* Transform fmin/fmax(x,x) -> x. */
9366 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9367 return omit_one_operand (type, arg0, arg1);
9369 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9370 functions to return the numeric arg if the other one is NaN.
9371 These tree codes don't honor that, so only transform if
9372 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9373 handled, so we don't have to worry about it either. */
9374 if (flag_finite_math_only)
9375 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9376 fold_convert (type, arg0),
9377 fold_convert (type, arg1));
9379 return NULL_TREE;
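/* Editor's note: a hedged sketch (under #if 0) of why the MIN_EXPR/
   MAX_EXPR conversion above is restricted to -ffinite-math-only:
   C99 fmin/fmax return the numeric operand when the other is a NaN,
   whereas a comparison-based maximum does not.  */
#if 0
#include <math.h>

static void
example_fmax_nan (double x)
{
  double n = nan ("");
  double a = fmax (x, n);          /* C99: yields x */
  double b = (x > n) ? x : n;      /* comparison is false: yields the NaN */
  (void) a; (void) b;
}
#endif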
9382 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9384 static tree
9385 fold_builtin_carg (tree arg, tree type)
9387 if (validate_arg (arg, COMPLEX_TYPE))
9389 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9391 if (atan2_fn)
9393 tree new_arg = builtin_save_expr (arg);
9394 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9395 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9396 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9400 return NULL_TREE;
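/* Editor's note: the carg fold above written out at the C level (under
   #if 0, not compiled); the argument is saved first so its real and
   imaginary parts are not evaluated twice.  */
#if 0
#include <complex.h>
#include <math.h>

static double
example_carg (double _Complex z)
{
  /* carg (z) == atan2 (cimag (z), creal (z)).  */
  return atan2 (cimag (z), creal (z));
}
#endif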
9403 /* Fold a call to builtin logb/ilogb. */
9405 static tree
9406 fold_builtin_logb (tree arg, tree rettype)
9408 if (! validate_arg (arg, REAL_TYPE))
9409 return NULL_TREE;
9411 STRIP_NOPS (arg);
9413 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9415 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9417 switch (value->cl)
9419 case rvc_nan:
9420 case rvc_inf:
9421 /* If arg is Inf or NaN and we're logb, return it. */
9422 if (TREE_CODE (rettype) == REAL_TYPE)
9423 return fold_convert (rettype, arg);
9424 /* Fall through... */
9425 case rvc_zero:
9426 /* Zero may set errno and/or raise an exception for logb, also
9427 for ilogb we don't know FP_ILOGB0. */
9428 return NULL_TREE;
9429 case rvc_normal:
9430 /* For normal numbers, proceed iff radix == 2. In GCC,
9431 normalized significands are in the range [0.5, 1.0). We
9432 want the exponent as if they were [1.0, 2.0) so get the
9433 exponent and subtract 1. */
9434 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9435 return fold_convert (rettype, build_int_cst (NULL_TREE,
9436 REAL_EXP (value)-1));
9437 break;
9441 return NULL_TREE;
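/* Editor's note: an illustrative check (under #if 0) of the exponent
   convention used above: GCC keeps significands in [0.5, 1.0), while
   logb/ilogb are defined for significands in [1.0, 2.0), hence the
   REAL_EXP (value) - 1.  */
#if 0
#include <math.h>

static void
example_logb_convention (void)
{
  /* 8.0 is 0.5 * 2^4 internally, but logb (8.0) is 3.0.  */
  double e = logb (8.0);   /* 3.0 == (internal exponent 4) - 1 */
  (void) e;
}
#endif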
9444 /* Fold a call to builtin significand, if radix == 2. */
9446 static tree
9447 fold_builtin_significand (tree arg, tree rettype)
9449 if (! validate_arg (arg, REAL_TYPE))
9450 return NULL_TREE;
9452 STRIP_NOPS (arg);
9454 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9456 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9458 switch (value->cl)
9460 case rvc_zero:
9461 case rvc_nan:
9462 case rvc_inf:
9463 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9464 return fold_convert (rettype, arg);
9465 case rvc_normal:
9466 /* For normal numbers, proceed iff radix == 2. */
9467 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9469 REAL_VALUE_TYPE result = *value;
9470 /* In GCC, normalized significands are in the range [0.5,
9471 1.0). We want them to be [1.0, 2.0) so set the
9472 exponent to 1. */
9473 SET_REAL_EXP (&result, 1);
9474 return build_real (rettype, result);
9476 break;
9480 return NULL_TREE;
9483 /* Fold a call to builtin frexp, we can assume the base is 2. */
9485 static tree
9486 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9488 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9489 return NULL_TREE;
9491 STRIP_NOPS (arg0);
9493 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9494 return NULL_TREE;
9496 arg1 = build_fold_indirect_ref (arg1);
9498 /* Proceed if a valid pointer type was passed in. */
9499 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9501 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9502 tree frac, exp;
9504 switch (value->cl)
9506 case rvc_zero:
9507 /* For +-0, return (*exp = 0, +-0). */
9508 exp = integer_zero_node;
9509 frac = arg0;
9510 break;
9511 case rvc_nan:
9512 case rvc_inf:
9513 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9514 return omit_one_operand (rettype, arg0, arg1);
9515 case rvc_normal:
9517 /* Since the frexp function always expects base 2, and in
9518 GCC normalized significands are already in the range
9519 [0.5, 1.0), we have exactly what frexp wants. */
9520 REAL_VALUE_TYPE frac_rvt = *value;
9521 SET_REAL_EXP (&frac_rvt, 0);
9522 frac = build_real (rettype, frac_rvt);
9523 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9525 break;
9526 default:
9527 gcc_unreachable ();
9530 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9531 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9532 TREE_SIDE_EFFECTS (arg1) = 1;
9533 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9536 return NULL_TREE;
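/* Editor's note: a sketch (under #if 0) of the frexp decomposition the
   folder above computes at compile time for a constant argument.  */
#if 0
#include <math.h>

static void
example_frexp (void)
{
  int e;
  double f = frexp (12.0, &e);   /* f == 0.75, e == 4: 12.0 == 0.75 * 2^4 */
  (void) f;
}
#endif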
9539 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9540 then we can assume the base is two. If it's false, then we have to
9541 check the mode of the TYPE parameter in certain cases. */
9543 static tree
9544 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9546 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9548 STRIP_NOPS (arg0);
9549 STRIP_NOPS (arg1);
9551 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9552 if (real_zerop (arg0) || integer_zerop (arg1)
9553 || (TREE_CODE (arg0) == REAL_CST
9554 && !real_isfinite (&TREE_REAL_CST (arg0))))
9555 return omit_one_operand (type, arg0, arg1);
9557 /* If both arguments are constant, then try to evaluate it. */
9558 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9559 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9560 && host_integerp (arg1, 0))
9562 /* Bound the maximum adjustment to twice the range of the
9563 mode's valid exponents. Use abs to ensure the range is
9564 positive as a sanity check. */
9565 const long max_exp_adj = 2 *
9566 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9567 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9569 /* Get the user-requested adjustment. */
9570 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9572 /* The requested adjustment must be inside this range. This
9573 is a preliminary cap to avoid things like overflow, we
9574 may still fail to compute the result for other reasons. */
9575 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9577 REAL_VALUE_TYPE initial_result;
9579 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9581 /* Ensure we didn't overflow. */
9582 if (! real_isinf (&initial_result))
9584 const REAL_VALUE_TYPE trunc_result
9585 = real_value_truncate (TYPE_MODE (type), initial_result);
9587 /* Only proceed if the target mode can hold the
9588 resulting value. */
9589 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9590 return build_real (type, trunc_result);
9596 return NULL_TREE;
9599 /* Fold a call to builtin modf. */
9601 static tree
9602 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9604 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9605 return NULL_TREE;
9607 STRIP_NOPS (arg0);
9609 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9610 return NULL_TREE;
9612 arg1 = build_fold_indirect_ref (arg1);
9614 /* Proceed if a valid pointer type was passed in. */
9615 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9617 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9618 REAL_VALUE_TYPE trunc, frac;
9620 switch (value->cl)
9622 case rvc_nan:
9623 case rvc_zero:
9624 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9625 trunc = frac = *value;
9626 break;
9627 case rvc_inf:
9628 /* For +-Inf, return (*arg1 = arg0, +-0). */
9629 frac = dconst0;
9630 frac.sign = value->sign;
9631 trunc = *value;
9632 break;
9633 case rvc_normal:
9634 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9635 real_trunc (&trunc, VOIDmode, value);
9636 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9637 /* If the original number was negative and already
9638 integral, then the fractional part is -0.0. */
9639 if (value->sign && frac.cl == rvc_zero)
9640 frac.sign = value->sign;
9641 break;
9644 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9645 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9646 build_real (rettype, trunc));
9647 TREE_SIDE_EFFECTS (arg1) = 1;
9648 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9649 build_real (rettype, frac));
9652 return NULL_TREE;
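/* Editor's note: a sketch (under #if 0) of the constant modf cases
   handled above, including the sign of a zero fractional part.  */
#if 0
#include <math.h>

static void
example_modf (void)
{
  double ip;
  double f1 = modf (2.5, &ip);    /* f1 == 0.5, ip == 2.0 */
  double f2 = modf (-3.0, &ip);   /* f2 == -0.0, ip == -3.0 */
  (void) f1; (void) f2;
}
#endif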
9655 /* Fold a call to __builtin_isnan, __builtin_isinf, __builtin_isinf_sign or __builtin_isfinite.
9656 ARG is the argument for the call. */
9658 static tree
9659 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9661 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9662 REAL_VALUE_TYPE r;
9664 if (!validate_arg (arg, REAL_TYPE))
9665 return NULL_TREE;
9667 switch (builtin_index)
9669 case BUILT_IN_ISINF:
9670 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9671 return omit_one_operand (type, integer_zero_node, arg);
9673 if (TREE_CODE (arg) == REAL_CST)
9675 r = TREE_REAL_CST (arg);
9676 if (real_isinf (&r))
9677 return real_compare (GT_EXPR, &r, &dconst0)
9678 ? integer_one_node : integer_minus_one_node;
9679 else
9680 return integer_zero_node;
9683 return NULL_TREE;
9685 case BUILT_IN_ISINF_SIGN:
9687 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9688 /* In a boolean context, GCC will fold the inner COND_EXPR to
9689 1. So e.g. "if (isinf_sign(x))" would be folded to just
9690 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9691 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9692 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9693 tree tmp = NULL_TREE;
9695 arg = builtin_save_expr (arg);
9697 if (signbit_fn && isinf_fn)
9699 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9700 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9702 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9703 signbit_call, integer_zero_node);
9704 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9705 isinf_call, integer_zero_node);
9707 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9708 integer_minus_one_node, integer_one_node);
9709 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9710 integer_zero_node);
9713 return tmp;
9716 case BUILT_IN_ISFINITE:
9717 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9718 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9719 return omit_one_operand (type, integer_one_node, arg);
9721 if (TREE_CODE (arg) == REAL_CST)
9723 r = TREE_REAL_CST (arg);
9724 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9727 return NULL_TREE;
9729 case BUILT_IN_ISNAN:
9730 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9731 return omit_one_operand (type, integer_zero_node, arg);
9733 if (TREE_CODE (arg) == REAL_CST)
9735 r = TREE_REAL_CST (arg);
9736 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9739 arg = builtin_save_expr (arg);
9740 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9742 default:
9743 gcc_unreachable ();
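/* Editor's note: the isinf_sign lowering above, written out in C (under
   #if 0 so it is not compiled); the helper is hypothetical.  */
#if 0
#include <math.h>

static int
example_isinf_sign (double x)
{
  /* isinf_sign (x) -> isinf (x) ? (signbit (x) ? -1 : 1) : 0.  */
  return isinf (x) ? (signbit (x) ? -1 : 1) : 0;
}
#endif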
9747 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9748 This builtin will generate code to return the appropriate floating
9749 point classification depending on the value of the floating point
9750 number passed in. The possible return values must be supplied as
9751 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9752 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9753 one floating point argument which is "type generic". */
9755 static tree
9756 fold_builtin_fpclassify (tree exp)
9758 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9759 arg, type, res, tmp;
9760 enum machine_mode mode;
9761 REAL_VALUE_TYPE r;
9762 char buf[128];
9764 /* Verify the required arguments in the original call. */
9765 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9766 INTEGER_TYPE, INTEGER_TYPE,
9767 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9768 return NULL_TREE;
9770 fp_nan = CALL_EXPR_ARG (exp, 0);
9771 fp_infinite = CALL_EXPR_ARG (exp, 1);
9772 fp_normal = CALL_EXPR_ARG (exp, 2);
9773 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9774 fp_zero = CALL_EXPR_ARG (exp, 4);
9775 arg = CALL_EXPR_ARG (exp, 5);
9776 type = TREE_TYPE (arg);
9777 mode = TYPE_MODE (type);
9778 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9780 /* fpclassify(x) ->
9781 isnan(x) ? FP_NAN :
9782 (fabs(x) == Inf ? FP_INFINITE :
9783 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9784 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9786 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9787 build_real (type, dconst0));
9788 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
9790 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9791 real_from_string (&r, buf);
9792 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9793 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
9795 if (HONOR_INFINITIES (mode))
9797 real_inf (&r);
9798 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9799 build_real (type, r));
9800 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
9803 if (HONOR_NANS (mode))
9805 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
9806 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
9809 return res;
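/* Editor's note: the cascade built above, shown as C (under #if 0, not
   compiled).  The FP_* values come in as the first five arguments of the
   builtin; DBL_MIN plays the role of the 0x1p(emin-1) constant for the
   double case, and the helper is hypothetical.  */
#if 0
#include <float.h>
#include <math.h>

static int
example_fpclassify (double x)
{
  double a = fabs (x);
  return isnan (x)      ? FP_NAN
       : a == INFINITY  ? FP_INFINITE
       : a >= DBL_MIN   ? FP_NORMAL
       : x == 0.0       ? FP_ZERO
       :                  FP_SUBNORMAL;
}
#endif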
9812 /* Fold a call to an unordered comparison function such as
9813 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9814 being called and ARG0 and ARG1 are the arguments for the call.
9815 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9816 the opposite of the desired result. UNORDERED_CODE is used
9817 for modes that can hold NaNs and ORDERED_CODE is used for
9818 the rest. */
9820 static tree
9821 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9822 enum tree_code unordered_code,
9823 enum tree_code ordered_code)
9825 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9826 enum tree_code code;
9827 tree type0, type1;
9828 enum tree_code code0, code1;
9829 tree cmp_type = NULL_TREE;
9831 type0 = TREE_TYPE (arg0);
9832 type1 = TREE_TYPE (arg1);
9834 code0 = TREE_CODE (type0);
9835 code1 = TREE_CODE (type1);
9837 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9838 /* Choose the wider of two real types. */
9839 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9840 ? type0 : type1;
9841 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9842 cmp_type = type0;
9843 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9844 cmp_type = type1;
9846 arg0 = fold_convert (cmp_type, arg0);
9847 arg1 = fold_convert (cmp_type, arg1);
9849 if (unordered_code == UNORDERED_EXPR)
9851 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9852 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9853 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9856 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9857 : ordered_code;
9858 return fold_build1 (TRUTH_NOT_EXPR, type,
9859 fold_build2 (code, type, arg0, arg1));
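/* Editor's note: a hedged sketch (under #if 0) of the unordered
   comparison lowering above.  Each macro becomes the logical negation of
   the opposite, possibly-unordered comparison; the helper below spells
   out the isgreater case in plain C.  */
#if 0
#include <math.h>

static int
example_isgreater (double x, double y)
{
  /* With NaNs honored the folder emits TRUTH_NOT_EXPR of UNLE_EXPR;
     at the C level that is "ordered and greater".  */
  return !isunordered (x, y) && x > y;
}
#endif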
9862 /* Fold a call to built-in function FNDECL with 0 arguments.
9863 IGNORE is true if the result of the function call is ignored. This
9864 function returns NULL_TREE if no simplification was possible. */
9866 static tree
9867 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9869 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9870 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9871 switch (fcode)
9873 CASE_FLT_FN (BUILT_IN_INF):
9874 case BUILT_IN_INFD32:
9875 case BUILT_IN_INFD64:
9876 case BUILT_IN_INFD128:
9877 return fold_builtin_inf (type, true);
9879 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9880 return fold_builtin_inf (type, false);
9882 case BUILT_IN_CLASSIFY_TYPE:
9883 return fold_builtin_classify_type (NULL_TREE);
9885 default:
9886 break;
9888 return NULL_TREE;
9891 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9892 IGNORE is true if the result of the function call is ignored. This
9893 function returns NULL_TREE if no simplification was possible. */
9895 static tree
9896 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9898 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9899 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9900 switch (fcode)
9903 case BUILT_IN_CONSTANT_P:
9905 tree val = fold_builtin_constant_p (arg0);
9907 /* Gimplification will pull the CALL_EXPR for the builtin out of
9908 an if condition. When not optimizing, we'll not CSE it back.
9909 To avoid link error types of regressions, return false now. */
9910 if (!val && !optimize)
9911 val = integer_zero_node;
9913 return val;
9916 case BUILT_IN_CLASSIFY_TYPE:
9917 return fold_builtin_classify_type (arg0);
9919 case BUILT_IN_STRLEN:
9920 return fold_builtin_strlen (arg0);
9922 CASE_FLT_FN (BUILT_IN_FABS):
9923 return fold_builtin_fabs (arg0, type);
9925 case BUILT_IN_ABS:
9926 case BUILT_IN_LABS:
9927 case BUILT_IN_LLABS:
9928 case BUILT_IN_IMAXABS:
9929 return fold_builtin_abs (arg0, type);
9931 CASE_FLT_FN (BUILT_IN_CONJ):
9932 if (validate_arg (arg0, COMPLEX_TYPE))
9933 return fold_build1 (CONJ_EXPR, type, arg0);
9934 break;
9936 CASE_FLT_FN (BUILT_IN_CREAL):
9937 if (validate_arg (arg0, COMPLEX_TYPE))
9938 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
9939 break;
9941 CASE_FLT_FN (BUILT_IN_CIMAG):
9942 if (validate_arg (arg0, COMPLEX_TYPE))
9943 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9944 break;
9946 CASE_FLT_FN (BUILT_IN_CCOS):
9947 CASE_FLT_FN (BUILT_IN_CCOSH):
9948 /* These functions are "even", i.e. f(x) == f(-x). */
9949 if (validate_arg (arg0, COMPLEX_TYPE))
9951 tree narg = fold_strip_sign_ops (arg0);
9952 if (narg)
9953 return build_call_expr (fndecl, 1, narg);
9955 break;
9957 CASE_FLT_FN (BUILT_IN_CABS):
9958 return fold_builtin_cabs (arg0, type, fndecl);
9960 CASE_FLT_FN (BUILT_IN_CARG):
9961 return fold_builtin_carg (arg0, type);
9963 CASE_FLT_FN (BUILT_IN_SQRT):
9964 return fold_builtin_sqrt (arg0, type);
9966 CASE_FLT_FN (BUILT_IN_CBRT):
9967 return fold_builtin_cbrt (arg0, type);
9969 CASE_FLT_FN (BUILT_IN_ASIN):
9970 if (validate_arg (arg0, REAL_TYPE))
9971 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9972 &dconstm1, &dconst1, true);
9973 break;
9975 CASE_FLT_FN (BUILT_IN_ACOS):
9976 if (validate_arg (arg0, REAL_TYPE))
9977 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9978 &dconstm1, &dconst1, true);
9979 break;
9981 CASE_FLT_FN (BUILT_IN_ATAN):
9982 if (validate_arg (arg0, REAL_TYPE))
9983 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9984 break;
9986 CASE_FLT_FN (BUILT_IN_ASINH):
9987 if (validate_arg (arg0, REAL_TYPE))
9988 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9989 break;
9991 CASE_FLT_FN (BUILT_IN_ACOSH):
9992 if (validate_arg (arg0, REAL_TYPE))
9993 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9994 &dconst1, NULL, true);
9995 break;
9997 CASE_FLT_FN (BUILT_IN_ATANH):
9998 if (validate_arg (arg0, REAL_TYPE))
9999 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10000 &dconstm1, &dconst1, false);
10001 break;
10003 CASE_FLT_FN (BUILT_IN_SIN):
10004 if (validate_arg (arg0, REAL_TYPE))
10005 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10006 break;
10008 CASE_FLT_FN (BUILT_IN_COS):
10009 return fold_builtin_cos (arg0, type, fndecl);
10010 break;
10012 CASE_FLT_FN (BUILT_IN_TAN):
10013 return fold_builtin_tan (arg0, type);
10015 CASE_FLT_FN (BUILT_IN_CEXP):
10016 return fold_builtin_cexp (arg0, type);
10018 CASE_FLT_FN (BUILT_IN_CEXPI):
10019 if (validate_arg (arg0, REAL_TYPE))
10020 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10021 break;
10023 CASE_FLT_FN (BUILT_IN_SINH):
10024 if (validate_arg (arg0, REAL_TYPE))
10025 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10026 break;
10028 CASE_FLT_FN (BUILT_IN_COSH):
10029 return fold_builtin_cosh (arg0, type, fndecl);
10031 CASE_FLT_FN (BUILT_IN_TANH):
10032 if (validate_arg (arg0, REAL_TYPE))
10033 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10034 break;
10036 CASE_FLT_FN (BUILT_IN_ERF):
10037 if (validate_arg (arg0, REAL_TYPE))
10038 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10039 break;
10041 CASE_FLT_FN (BUILT_IN_ERFC):
10042 if (validate_arg (arg0, REAL_TYPE))
10043 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10044 break;
10046 CASE_FLT_FN (BUILT_IN_TGAMMA):
10047 if (validate_arg (arg0, REAL_TYPE))
10048 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10049 break;
10051 CASE_FLT_FN (BUILT_IN_EXP):
10052 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10054 CASE_FLT_FN (BUILT_IN_EXP2):
10055 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10057 CASE_FLT_FN (BUILT_IN_EXP10):
10058 CASE_FLT_FN (BUILT_IN_POW10):
10059 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10061 CASE_FLT_FN (BUILT_IN_EXPM1):
10062 if (validate_arg (arg0, REAL_TYPE))
10063 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10064 break;
10066 CASE_FLT_FN (BUILT_IN_LOG):
10067 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10069 CASE_FLT_FN (BUILT_IN_LOG2):
10070 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10072 CASE_FLT_FN (BUILT_IN_LOG10):
10073 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10075 CASE_FLT_FN (BUILT_IN_LOG1P):
10076 if (validate_arg (arg0, REAL_TYPE))
10077 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10078 &dconstm1, NULL, false);
10079 break;
10081 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10082 CASE_FLT_FN (BUILT_IN_J0):
10083 if (validate_arg (arg0, REAL_TYPE))
10084 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10085 NULL, NULL, 0);
10086 break;
10088 CASE_FLT_FN (BUILT_IN_J1):
10089 if (validate_arg (arg0, REAL_TYPE))
10090 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10091 NULL, NULL, 0);
10092 break;
10094 CASE_FLT_FN (BUILT_IN_Y0):
10095 if (validate_arg (arg0, REAL_TYPE))
10096 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10097 &dconst0, NULL, false);
10098 break;
10100 CASE_FLT_FN (BUILT_IN_Y1):
10101 if (validate_arg (arg0, REAL_TYPE))
10102 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10103 &dconst0, NULL, false);
10104 break;
10105 #endif
10107 CASE_FLT_FN (BUILT_IN_NAN):
10108 case BUILT_IN_NAND32:
10109 case BUILT_IN_NAND64:
10110 case BUILT_IN_NAND128:
10111 return fold_builtin_nan (arg0, type, true);
10113 CASE_FLT_FN (BUILT_IN_NANS):
10114 return fold_builtin_nan (arg0, type, false);
10116 CASE_FLT_FN (BUILT_IN_FLOOR):
10117 return fold_builtin_floor (fndecl, arg0);
10119 CASE_FLT_FN (BUILT_IN_CEIL):
10120 return fold_builtin_ceil (fndecl, arg0);
10122 CASE_FLT_FN (BUILT_IN_TRUNC):
10123 return fold_builtin_trunc (fndecl, arg0);
10125 CASE_FLT_FN (BUILT_IN_ROUND):
10126 return fold_builtin_round (fndecl, arg0);
10128 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10129 CASE_FLT_FN (BUILT_IN_RINT):
10130 return fold_trunc_transparent_mathfn (fndecl, arg0);
10132 CASE_FLT_FN (BUILT_IN_LCEIL):
10133 CASE_FLT_FN (BUILT_IN_LLCEIL):
10134 CASE_FLT_FN (BUILT_IN_LFLOOR):
10135 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10136 CASE_FLT_FN (BUILT_IN_LROUND):
10137 CASE_FLT_FN (BUILT_IN_LLROUND):
10138 return fold_builtin_int_roundingfn (fndecl, arg0);
10140 CASE_FLT_FN (BUILT_IN_LRINT):
10141 CASE_FLT_FN (BUILT_IN_LLRINT):
10142 return fold_fixed_mathfn (fndecl, arg0);
10144 case BUILT_IN_BSWAP32:
10145 case BUILT_IN_BSWAP64:
10146 return fold_builtin_bswap (fndecl, arg0);
10148 CASE_INT_FN (BUILT_IN_FFS):
10149 CASE_INT_FN (BUILT_IN_CLZ):
10150 CASE_INT_FN (BUILT_IN_CTZ):
10151 CASE_INT_FN (BUILT_IN_POPCOUNT):
10152 CASE_INT_FN (BUILT_IN_PARITY):
10153 return fold_builtin_bitop (fndecl, arg0);
10155 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10156 return fold_builtin_signbit (arg0, type);
10158 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10159 return fold_builtin_significand (arg0, type);
10161 CASE_FLT_FN (BUILT_IN_ILOGB):
10162 CASE_FLT_FN (BUILT_IN_LOGB):
10163 return fold_builtin_logb (arg0, type);
10165 case BUILT_IN_ISASCII:
10166 return fold_builtin_isascii (arg0);
10168 case BUILT_IN_TOASCII:
10169 return fold_builtin_toascii (arg0);
10171 case BUILT_IN_ISDIGIT:
10172 return fold_builtin_isdigit (arg0);
10174 CASE_FLT_FN (BUILT_IN_FINITE):
10175 case BUILT_IN_FINITED32:
10176 case BUILT_IN_FINITED64:
10177 case BUILT_IN_FINITED128:
10178 case BUILT_IN_ISFINITE:
10179 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10181 CASE_FLT_FN (BUILT_IN_ISINF):
10182 case BUILT_IN_ISINFD32:
10183 case BUILT_IN_ISINFD64:
10184 case BUILT_IN_ISINFD128:
10185 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10187 case BUILT_IN_ISINF_SIGN:
10188 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10190 CASE_FLT_FN (BUILT_IN_ISNAN):
10191 case BUILT_IN_ISNAND32:
10192 case BUILT_IN_ISNAND64:
10193 case BUILT_IN_ISNAND128:
10194 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10196 case BUILT_IN_PRINTF:
10197 case BUILT_IN_PRINTF_UNLOCKED:
10198 case BUILT_IN_VPRINTF:
10199 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10201 default:
10202 break;
10205 return NULL_TREE;
10209 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10210 IGNORE is true if the result of the function call is ignored. This
10211 function returns NULL_TREE if no simplification was possible. */
10213 static tree
10214 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10216 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10217 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10219 switch (fcode)
10221 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10222 CASE_FLT_FN (BUILT_IN_JN):
10223 if (validate_arg (arg0, INTEGER_TYPE)
10224 && validate_arg (arg1, REAL_TYPE))
10225 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10226 break;
10228 CASE_FLT_FN (BUILT_IN_YN):
10229 if (validate_arg (arg0, INTEGER_TYPE)
10230 && validate_arg (arg1, REAL_TYPE))
10231 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10232 &dconst0, false);
10233 break;
10235 CASE_FLT_FN (BUILT_IN_DREM):
10236 CASE_FLT_FN (BUILT_IN_REMAINDER):
10237 if (validate_arg (arg0, REAL_TYPE)
10238 && validate_arg (arg1, REAL_TYPE))
10239 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10240 break;
10242 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10243 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10244 if (validate_arg (arg0, REAL_TYPE)
10245 && validate_arg (arg1, POINTER_TYPE))
10246 return do_mpfr_lgamma_r (arg0, arg1, type);
10247 break;
10248 #endif
10250 CASE_FLT_FN (BUILT_IN_ATAN2):
10251 if (validate_arg (arg0, REAL_TYPE)
10252 && validate_arg (arg1, REAL_TYPE))
10253 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10254 break;
10256 CASE_FLT_FN (BUILT_IN_FDIM):
10257 if (validate_arg (arg0, REAL_TYPE)
10258 && validate_arg (arg1, REAL_TYPE))
10259 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10260 break;
10262 CASE_FLT_FN (BUILT_IN_HYPOT):
10263 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10265 CASE_FLT_FN (BUILT_IN_LDEXP):
10266 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10267 CASE_FLT_FN (BUILT_IN_SCALBN):
10268 CASE_FLT_FN (BUILT_IN_SCALBLN):
10269 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10271 CASE_FLT_FN (BUILT_IN_FREXP):
10272 return fold_builtin_frexp (arg0, arg1, type);
10274 CASE_FLT_FN (BUILT_IN_MODF):
10275 return fold_builtin_modf (arg0, arg1, type);
10277 case BUILT_IN_BZERO:
10278 return fold_builtin_bzero (arg0, arg1, ignore);
10280 case BUILT_IN_FPUTS:
10281 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10283 case BUILT_IN_FPUTS_UNLOCKED:
10284 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10286 case BUILT_IN_STRSTR:
10287 return fold_builtin_strstr (arg0, arg1, type);
10289 case BUILT_IN_STRCAT:
10290 return fold_builtin_strcat (arg0, arg1);
10292 case BUILT_IN_STRSPN:
10293 return fold_builtin_strspn (arg0, arg1);
10295 case BUILT_IN_STRCSPN:
10296 return fold_builtin_strcspn (arg0, arg1);
10298 case BUILT_IN_STRCHR:
10299 case BUILT_IN_INDEX:
10300 return fold_builtin_strchr (arg0, arg1, type);
10302 case BUILT_IN_STRRCHR:
10303 case BUILT_IN_RINDEX:
10304 return fold_builtin_strrchr (arg0, arg1, type);
10306 case BUILT_IN_STRCPY:
10307 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10309 case BUILT_IN_STRCMP:
10310 return fold_builtin_strcmp (arg0, arg1);
10312 case BUILT_IN_STRPBRK:
10313 return fold_builtin_strpbrk (arg0, arg1, type);
10315 case BUILT_IN_EXPECT:
10316 return fold_builtin_expect (arg0, arg1);
10318 CASE_FLT_FN (BUILT_IN_POW):
10319 return fold_builtin_pow (fndecl, arg0, arg1, type);
10321 CASE_FLT_FN (BUILT_IN_POWI):
10322 return fold_builtin_powi (fndecl, arg0, arg1, type);
10324 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10325 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10327 CASE_FLT_FN (BUILT_IN_FMIN):
10328 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10330 CASE_FLT_FN (BUILT_IN_FMAX):
10331 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10333 case BUILT_IN_ISGREATER:
10334 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10335 case BUILT_IN_ISGREATEREQUAL:
10336 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10337 case BUILT_IN_ISLESS:
10338 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10339 case BUILT_IN_ISLESSEQUAL:
10340 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10341 case BUILT_IN_ISLESSGREATER:
10342 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10343 case BUILT_IN_ISUNORDERED:
10344 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10345 NOP_EXPR);
10347 /* We do the folding for va_start in the expander. */
10348 case BUILT_IN_VA_START:
10349 break;
10351 case BUILT_IN_SPRINTF:
10352 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10354 case BUILT_IN_OBJECT_SIZE:
10355 return fold_builtin_object_size (arg0, arg1);
10357 case BUILT_IN_PRINTF:
10358 case BUILT_IN_PRINTF_UNLOCKED:
10359 case BUILT_IN_VPRINTF:
10360 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10362 case BUILT_IN_PRINTF_CHK:
10363 case BUILT_IN_VPRINTF_CHK:
10364 if (!validate_arg (arg0, INTEGER_TYPE)
10365 || TREE_SIDE_EFFECTS (arg0))
10366 return NULL_TREE;
10367 else
10368 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10369 break;
10371 case BUILT_IN_FPRINTF:
10372 case BUILT_IN_FPRINTF_UNLOCKED:
10373 case BUILT_IN_VFPRINTF:
10374 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10375 ignore, fcode);
10377 default:
10378 break;
10380 return NULL_TREE;
10383 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10384 and ARG2. IGNORE is true if the result of the function call is ignored.
10385 This function returns NULL_TREE if no simplification was possible. */
10387 static tree
10388 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10390 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10391 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10392 switch (fcode)
10395 CASE_FLT_FN (BUILT_IN_SINCOS):
10396 return fold_builtin_sincos (arg0, arg1, arg2);
10398 CASE_FLT_FN (BUILT_IN_FMA):
10399 if (validate_arg (arg0, REAL_TYPE)
10400 && validate_arg (arg1, REAL_TYPE)
10401 && validate_arg (arg2, REAL_TYPE))
10402 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10403 break;
10405 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10406 CASE_FLT_FN (BUILT_IN_REMQUO):
10407 if (validate_arg (arg0, REAL_TYPE)
10408 && validate_arg (arg1, REAL_TYPE)
10409 && validate_arg (arg2, POINTER_TYPE))
10410 return do_mpfr_remquo (arg0, arg1, arg2);
10411 break;
10412 #endif
10414 case BUILT_IN_MEMSET:
10415 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10417 case BUILT_IN_BCOPY:
10418 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10420 case BUILT_IN_MEMCPY:
10421 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10423 case BUILT_IN_MEMPCPY:
10424 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10426 case BUILT_IN_MEMMOVE:
10427 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10429 case BUILT_IN_STRNCAT:
10430 return fold_builtin_strncat (arg0, arg1, arg2);
10432 case BUILT_IN_STRNCPY:
10433 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10435 case BUILT_IN_STRNCMP:
10436 return fold_builtin_strncmp (arg0, arg1, arg2);
10438 case BUILT_IN_MEMCHR:
10439 return fold_builtin_memchr (arg0, arg1, arg2, type);
10441 case BUILT_IN_BCMP:
10442 case BUILT_IN_MEMCMP:
10443 return fold_builtin_memcmp (arg0, arg1, arg2);
10445 case BUILT_IN_SPRINTF:
10446 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10448 case BUILT_IN_STRCPY_CHK:
10449 case BUILT_IN_STPCPY_CHK:
10450 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10451 ignore, fcode);
10453 case BUILT_IN_STRCAT_CHK:
10454 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10456 case BUILT_IN_PRINTF_CHK:
10457 case BUILT_IN_VPRINTF_CHK:
10458 if (!validate_arg (arg0, INTEGER_TYPE)
10459 || TREE_SIDE_EFFECTS (arg0))
10460 return NULL_TREE;
10461 else
10462 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10463 break;
10465 case BUILT_IN_FPRINTF:
10466 case BUILT_IN_FPRINTF_UNLOCKED:
10467 case BUILT_IN_VFPRINTF:
10468 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10470 case BUILT_IN_FPRINTF_CHK:
10471 case BUILT_IN_VFPRINTF_CHK:
10472 if (!validate_arg (arg1, INTEGER_TYPE)
10473 || TREE_SIDE_EFFECTS (arg1))
10474 return NULL_TREE;
10475 else
10476 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10477 ignore, fcode);
10479 default:
10480 break;
10482 return NULL_TREE;
10485 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10486 ARG2, and ARG3. IGNORE is true if the result of the function call is
10487 ignored. This function returns NULL_TREE if no simplification was
10488 possible. */
10490 static tree
10491 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10492 bool ignore)
10494 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10496 switch (fcode)
10498 case BUILT_IN_MEMCPY_CHK:
10499 case BUILT_IN_MEMPCPY_CHK:
10500 case BUILT_IN_MEMMOVE_CHK:
10501 case BUILT_IN_MEMSET_CHK:
10502 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10503 NULL_TREE, ignore,
10504 DECL_FUNCTION_CODE (fndecl));
10506 case BUILT_IN_STRNCPY_CHK:
10507 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10509 case BUILT_IN_STRNCAT_CHK:
10510 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10512 case BUILT_IN_FPRINTF_CHK:
10513 case BUILT_IN_VFPRINTF_CHK:
10514 if (!validate_arg (arg1, INTEGER_TYPE)
10515 || TREE_SIDE_EFFECTS (arg1))
10516 return NULL_TREE;
10517 else
10518 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10519 ignore, fcode);
10520 break;
10522 default:
10523 break;
10525 return NULL_TREE;
10528 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10529 arguments, where NARGS <= 4. IGNORE is true if the result of the
10530 function call is ignored. This function returns NULL_TREE if no
10531 simplification was possible. Note that this only folds builtins with
10532 fixed argument patterns. Foldings that do varargs-to-varargs
10533 transformations, or that match calls with more than 4 arguments,
10534 need to be handled with fold_builtin_varargs instead. */
10536 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10538 static tree
10539 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10541 tree ret = NULL_TREE;
10543 switch (nargs)
10545 case 0:
10546 ret = fold_builtin_0 (fndecl, ignore);
10547 break;
10548 case 1:
10549 ret = fold_builtin_1 (fndecl, args[0], ignore);
10550 break;
10551 case 2:
10552 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10553 break;
10554 case 3:
10555 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10556 break;
10557 case 4:
10558 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10559 ignore);
10560 break;
10561 default:
10562 break;
10564 if (ret)
10566 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10567 TREE_NO_WARNING (ret) = 1;
10568 return ret;
10570 return NULL_TREE;
10573 /* Builtins with folding operations that operate on "..." arguments
10574 need special handling; we need to store the arguments in a convenient
10575 data structure before attempting any folding. Fortunately there are
10576 only a few builtins that fall into this category. FNDECL is the
10577 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10578 result of the function call is ignored. */
10580 static tree
10581 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10583 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10584 tree ret = NULL_TREE;
10586 switch (fcode)
10588 case BUILT_IN_SPRINTF_CHK:
10589 case BUILT_IN_VSPRINTF_CHK:
10590 ret = fold_builtin_sprintf_chk (exp, fcode);
10591 break;
10593 case BUILT_IN_SNPRINTF_CHK:
10594 case BUILT_IN_VSNPRINTF_CHK:
10595 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10596 break;
10598 case BUILT_IN_FPCLASSIFY:
10599 ret = fold_builtin_fpclassify (exp);
10600 break;
10602 default:
10603 break;
10605 if (ret)
10607 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10608 TREE_NO_WARNING (ret) = 1;
10609 return ret;
10611 return NULL_TREE;
10614 /* A wrapper function for builtin folding that prevents warnings for
10615 "statement without effect" and the like, caused by removing the
10616 call node earlier than the warning is generated. */
10618 tree
10619 fold_call_expr (tree exp, bool ignore)
10621 tree ret = NULL_TREE;
10622 tree fndecl = get_callee_fndecl (exp);
10623 if (fndecl
10624 && TREE_CODE (fndecl) == FUNCTION_DECL
10625 && DECL_BUILT_IN (fndecl)
10626 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10627 yet. Defer folding until we see all the arguments
10628 (after inlining). */
10629 && !CALL_EXPR_VA_ARG_PACK (exp))
10631 int nargs = call_expr_nargs (exp);
10633 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10634 instead the last argument is __builtin_va_arg_pack (). Defer folding
10635 even in that case, until arguments are finalized. */
10636 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10638 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10639 if (fndecl2
10640 && TREE_CODE (fndecl2) == FUNCTION_DECL
10641 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10642 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10643 return NULL_TREE;
10646 /* FIXME: Don't use a list in this interface. */
10647 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10648 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10649 else
10651 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10653 tree *args = CALL_EXPR_ARGP (exp);
10654 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10656 if (!ret)
10657 ret = fold_builtin_varargs (fndecl, exp, ignore);
10658 if (ret)
10660 /* Propagate location information from original call to
10661 expansion of builtin. Otherwise things like
10662 maybe_emit_chk_warning, that operate on the expansion
10663 of a builtin, will use the wrong location information. */
10664 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10666 tree realret = ret;
10667 if (TREE_CODE (ret) == NOP_EXPR)
10668 realret = TREE_OPERAND (ret, 0);
10669 if (CAN_HAVE_LOCATION_P (realret)
10670 && !EXPR_HAS_LOCATION (realret))
10671 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10672 return realret;
10674 return ret;
10678 return NULL_TREE;
10681 /* Conveniently construct a function call expression. FNDECL names the
10682 function to be called and ARGLIST is a TREE_LIST of arguments. */
10684 tree
10685 build_function_call_expr (tree fndecl, tree arglist)
10687 tree fntype = TREE_TYPE (fndecl);
10688 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10689 int n = list_length (arglist);
10690 tree *argarray = (tree *) alloca (n * sizeof (tree));
10691 int i;
10693 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10694 argarray[i] = TREE_VALUE (arglist);
10695 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10698 /* Conveniently construct a function call expression. FNDECL names the
10699 function to be called, N is the number of arguments, and the "..."
10700 parameters are the argument expressions. */
10702 tree
10703 build_call_expr (tree fndecl, int n, ...)
10705 va_list ap;
10706 tree fntype = TREE_TYPE (fndecl);
10707 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10708 tree *argarray = (tree *) alloca (n * sizeof (tree));
10709 int i;
10711 va_start (ap, n);
10712 for (i = 0; i < n; i++)
10713 argarray[i] = va_arg (ap, tree);
10714 va_end (ap);
10715 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10718 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10719 N arguments are passed in the array ARGARRAY. */
10721 tree
10722 fold_builtin_call_array (tree type,
10723 tree fn,
10724 int n,
10725 tree *argarray)
10727 tree ret = NULL_TREE;
10728 int i;
10729 tree exp;
10731 if (TREE_CODE (fn) == ADDR_EXPR)
10733 tree fndecl = TREE_OPERAND (fn, 0);
10734 if (TREE_CODE (fndecl) == FUNCTION_DECL
10735 && DECL_BUILT_IN (fndecl))
10737 /* If last argument is __builtin_va_arg_pack (), arguments to this
10738 function are not finalized yet. Defer folding until they are. */
10739 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10741 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10742 if (fndecl2
10743 && TREE_CODE (fndecl2) == FUNCTION_DECL
10744 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10745 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10746 return build_call_array (type, fn, n, argarray);
10748 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10750 tree arglist = NULL_TREE;
10751 for (i = n - 1; i >= 0; i--)
10752 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10753 ret = targetm.fold_builtin (fndecl, arglist, false);
10754 if (ret)
10755 return ret;
10757 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10759 /* First try the transformations that don't require consing up
10760 an exp. */
10761 ret = fold_builtin_n (fndecl, argarray, n, false);
10762 if (ret)
10763 return ret;
10766 /* If we got this far, we need to build an exp. */
10767 exp = build_call_array (type, fn, n, argarray);
10768 ret = fold_builtin_varargs (fndecl, exp, false);
10769 return ret ? ret : exp;
10773 return build_call_array (type, fn, n, argarray);
10776 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10777 along with N new arguments specified as the "..." parameters. SKIP
10778 is the number of arguments in EXP to be omitted. This function is used
10779 to do varargs-to-varargs transformations. */
10781 static tree
10782 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10784 int oldnargs = call_expr_nargs (exp);
10785 int nargs = oldnargs - skip + n;
10786 tree fntype = TREE_TYPE (fndecl);
10787 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10788 tree *buffer;
10790 if (n > 0)
10792 int i, j;
10793 va_list ap;
10795 buffer = XALLOCAVEC (tree, nargs);
10796 va_start (ap, n);
10797 for (i = 0; i < n; i++)
10798 buffer[i] = va_arg (ap, tree);
10799 va_end (ap);
10800 for (j = skip; j < oldnargs; j++, i++)
10801 buffer[i] = CALL_EXPR_ARG (exp, j);
10803 else
10804 buffer = CALL_EXPR_ARGP (exp) + skip;
10806 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10809 /* Validate a single argument ARG against a tree code CODE representing
10810 a type. */
10812 static bool
10813 validate_arg (const_tree arg, enum tree_code code)
10815 if (!arg)
10816 return false;
10817 else if (code == POINTER_TYPE)
10818 return POINTER_TYPE_P (TREE_TYPE (arg));
10819 else if (code == INTEGER_TYPE)
10820 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10821 return code == TREE_CODE (TREE_TYPE (arg));
10824 /* This function validates the types of a function call argument list
10825 against a specified list of tree_codes. If the last specifier is a 0,
10826 that represents an ellipsis; otherwise the last specifier must be a
10827 VOID_TYPE. */
10829 bool
10830 validate_arglist (const_tree callexpr, ...)
10832 enum tree_code code;
10833 bool res = false;
10834 va_list ap;
10835 const_call_expr_arg_iterator iter;
10836 const_tree arg;
10838 va_start (ap, callexpr);
10839 init_const_call_expr_arg_iterator (callexpr, &iter);
10843 code = va_arg (ap, enum tree_code);
10844 switch (code)
10846 case 0:
10847 /* This signifies an ellipsis; any further arguments are all ok. */
10848 res = true;
10849 goto end;
10850 case VOID_TYPE:
10851 /* This signifies an endlink, if no arguments remain, return
10852 true, otherwise return false. */
10853 res = !more_const_call_expr_args_p (&iter);
10854 goto end;
10855 default:
10856 /* If no parameters remain or the parameter's code does not
10857 match the specified code, return false. Otherwise continue
10858 checking any remaining arguments. */
10859 arg = next_const_call_expr_arg (&iter);
10860 if (!validate_arg (arg, code))
10861 goto end;
10862 break;
10865 while (1);
10867 /* We need the gotos above so that the single va_end call below is
10868 reached on every path out of the loop. */
10869 end: ;
10870 va_end (ap);
10872 return res;
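/* Editor's note: a usage sketch (under #if 0, not compiled) for
   validate_arglist; the helper and its argument patterns are assumptions
   made only for illustration.  A trailing 0 means "anything may follow",
   while VOID_TYPE demands that the argument list end exactly there.  */
#if 0
static bool
example_validate (tree exp)
{
  /* memset-like: pointer, integer, integer and nothing else.  */
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
                         VOID_TYPE))
    return false;
  /* printf-like: one pointer, then any further arguments.  */
  return validate_arglist (exp, POINTER_TYPE, 0);
}
#endif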
10875 /* Default target-specific builtin expander that does nothing. */
10877 rtx
10878 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10879 rtx target ATTRIBUTE_UNUSED,
10880 rtx subtarget ATTRIBUTE_UNUSED,
10881 enum machine_mode mode ATTRIBUTE_UNUSED,
10882 int ignore ATTRIBUTE_UNUSED)
10884 return NULL_RTX;
10887 /* Returns true if EXP represents data that would potentially reside
10888 in a readonly section. */
10890 static bool
10891 readonly_data_expr (tree exp)
10893 STRIP_NOPS (exp);
10895 if (TREE_CODE (exp) != ADDR_EXPR)
10896 return false;
10898 exp = get_base_address (TREE_OPERAND (exp, 0));
10899 if (!exp)
10900 return false;
10902 /* Make sure we call decl_readonly_section only for trees it
10903 can handle (since it returns true for everything it doesn't
10904 understand). */
10905 if (TREE_CODE (exp) == STRING_CST
10906 || TREE_CODE (exp) == CONSTRUCTOR
10907 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10908 return decl_readonly_section (exp, 0);
10909 else
10910 return false;
10913 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10914 to the call, and TYPE is its return type.
10916 Return NULL_TREE if no simplification was possible, otherwise return the
10917 simplified form of the call as a tree.
10919 The simplified form may be a constant or other expression which
10920 computes the same value, but in a more efficient manner (including
10921 calls to other builtin functions).
10923 The call may contain arguments which need to be evaluated, but
10924 which are not useful to determine the result of the call. In
10925 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10926 COMPOUND_EXPR will be an argument which must be evaluated.
10927 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10928 COMPOUND_EXPR in the chain will contain the tree for the simplified
10929 form of the builtin function call. */
10931 static tree
10932 fold_builtin_strstr (tree s1, tree s2, tree type)
10934 if (!validate_arg (s1, POINTER_TYPE)
10935 || !validate_arg (s2, POINTER_TYPE))
10936 return NULL_TREE;
10937 else
10939 tree fn;
10940 const char *p1, *p2;
10942 p2 = c_getstr (s2);
10943 if (p2 == NULL)
10944 return NULL_TREE;
10946 p1 = c_getstr (s1);
10947 if (p1 != NULL)
10949 const char *r = strstr (p1, p2);
10950 tree tem;
10952 if (r == NULL)
10953 return build_int_cst (TREE_TYPE (s1), 0);
10955 /* Return an offset into the constant string argument. */
10956 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10957 s1, size_int (r - p1));
10958 return fold_convert (type, tem);
10961 /* The argument is const char *, and the result is char *, so we need
10962 a type conversion here to avoid a warning. */
10963 if (p2[0] == '\0')
10964 return fold_convert (type, s1);
10966 if (p2[1] != '\0')
10967 return NULL_TREE;
10969 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10970 if (!fn)
10971 return NULL_TREE;
10973 /* New argument list transforming strstr(s1, s2) to
10974 strchr(s1, s2[0]). */
10975 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
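/* Editor's note: the source-level effect of the strstr folds above,
   sketched under #if 0 (not compiled); the helper is hypothetical.  */
#if 0
#include <string.h>

static void
example_strstr_folds (const char *s)
{
  char *a = strstr (s, "");    /* folds to (char *) s */
  char *b = strstr (s, "x");   /* folds to strchr (s, 'x') */
  (void) a; (void) b;
}
#endif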
10979 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10980 the call, and TYPE is its return type.
10982 Return NULL_TREE if no simplification was possible, otherwise return the
10983 simplified form of the call as a tree.
10985 The simplified form may be a constant or other expression which
10986 computes the same value, but in a more efficient manner (including
10987 calls to other builtin functions).
10989 The call may contain arguments which need to be evaluated, but
10990 which are not useful to determine the result of the call. In
10991 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10992 COMPOUND_EXPR will be an argument which must be evaluated.
10993 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10994 COMPOUND_EXPR in the chain will contain the tree for the simplified
10995 form of the builtin function call. */
10997 static tree
10998 fold_builtin_strchr (tree s1, tree s2, tree type)
11000 if (!validate_arg (s1, POINTER_TYPE)
11001 || !validate_arg (s2, INTEGER_TYPE))
11002 return NULL_TREE;
11003 else
11005 const char *p1;
11007 if (TREE_CODE (s2) != INTEGER_CST)
11008 return NULL_TREE;
11010 p1 = c_getstr (s1);
11011 if (p1 != NULL)
11013 char c;
11014 const char *r;
11015 tree tem;
11017 if (target_char_cast (s2, &c))
11018 return NULL_TREE;
11020 r = strchr (p1, c);
11022 if (r == NULL)
11023 return build_int_cst (TREE_TYPE (s1), 0);
11025 /* Return an offset into the constant string argument. */
11026 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11027 s1, size_int (r - p1));
11028 return fold_convert (type, tem);
11030 return NULL_TREE;
11034 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11035 the call, and TYPE is its return type.
11037 Return NULL_TREE if no simplification was possible, otherwise return the
11038 simplified form of the call as a tree.
11040 The simplified form may be a constant or other expression which
11041 computes the same value, but in a more efficient manner (including
11042 calls to other builtin functions).
11044 The call may contain arguments which need to be evaluated, but
11045 which are not useful to determine the result of the call. In
11046 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11047 COMPOUND_EXPR will be an argument which must be evaluated.
11048 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11049 COMPOUND_EXPR in the chain will contain the tree for the simplified
11050 form of the builtin function call. */
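/* Illustrative sketch added by this edit (not original source); s is a
   placeholder pointer:
     strrchr ("abcba", 'b')  ->  "abcba" + 3
     strrchr (s, '\0')       ->  strchr (s, '\0').  */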
11052 static tree
11053 fold_builtin_strrchr (tree s1, tree s2, tree type)
11055 if (!validate_arg (s1, POINTER_TYPE)
11056 || !validate_arg (s2, INTEGER_TYPE))
11057 return NULL_TREE;
11058 else
11060 tree fn;
11061 const char *p1;
11063 if (TREE_CODE (s2) != INTEGER_CST)
11064 return NULL_TREE;
11066 p1 = c_getstr (s1);
11067 if (p1 != NULL)
11069 char c;
11070 const char *r;
11071 tree tem;
11073 if (target_char_cast (s2, &c))
11074 return NULL_TREE;
11076 r = strrchr (p1, c);
11078 if (r == NULL)
11079 return build_int_cst (TREE_TYPE (s1), 0);
11081 /* Return an offset into the constant string argument. */
11082 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11083 s1, size_int (r - p1));
11084 return fold_convert (type, tem);
11087 if (! integer_zerop (s2))
11088 return NULL_TREE;
11090 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11091 if (!fn)
11092 return NULL_TREE;
11094 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11095 return build_call_expr (fn, 2, s1, s2);
11099 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11100 to the call, and TYPE is its return type.
11102 Return NULL_TREE if no simplification was possible, otherwise return the
11103 simplified form of the call as a tree.
11105 The simplified form may be a constant or other expression which
11106 computes the same value, but in a more efficient manner (including
11107 calls to other builtin functions).
11109 The call may contain arguments which need to be evaluated, but
11110 which are not useful to determine the result of the call. In
11111 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11112 COMPOUND_EXPR will be an argument which must be evaluated.
11113 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11114 COMPOUND_EXPR in the chain will contain the tree for the simplified
11115 form of the builtin function call. */
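/* Illustrative sketch added by this edit (not original source); s is a
   placeholder pointer:
     strpbrk (s, "")          ->  a null pointer (s still evaluated)
     strpbrk (s, "a")         ->  strchr (s, 'a')
     strpbrk ("abcde", "dx")  ->  "abcde" + 3.  */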
11117 static tree
11118 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11120 if (!validate_arg (s1, POINTER_TYPE)
11121 || !validate_arg (s2, POINTER_TYPE))
11122 return NULL_TREE;
11123 else
11125 tree fn;
11126 const char *p1, *p2;
11128 p2 = c_getstr (s2);
11129 if (p2 == NULL)
11130 return NULL_TREE;
11132 p1 = c_getstr (s1);
11133 if (p1 != NULL)
11135 const char *r = strpbrk (p1, p2);
11136 tree tem;
11138 if (r == NULL)
11139 return build_int_cst (TREE_TYPE (s1), 0);
11141 /* Return an offset into the constant string argument. */
11142 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11143 s1, size_int (r - p1));
11144 return fold_convert (type, tem);
11147 if (p2[0] == '\0')
11148 /* strpbrk(x, "") == NULL.
11149 Evaluate and ignore s1 in case it had side-effects. */
11150 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11152 if (p2[1] != '\0')
11153 return NULL_TREE; /* Really call strpbrk. */
11155 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11156 if (!fn)
11157 return NULL_TREE;
11159 /* New argument list transforming strpbrk(s1, s2) to
11160 strchr(s1, s2[0]). */
11161 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11165 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11166 to the call.
11168 Return NULL_TREE if no simplification was possible, otherwise return the
11169 simplified form of the call as a tree.
11171 The simplified form may be a constant or other expression which
11172 computes the same value, but in a more efficient manner (including
11173 calls to other builtin functions).
11175 The call may contain arguments which need to be evaluated, but
11176 which are not useful to determine the result of the call. In
11177 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11178 COMPOUND_EXPR will be an argument which must be evaluated.
11179 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11180 COMPOUND_EXPR in the chain will contain the tree for the simplified
11181 form of the builtin function call. */
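/* Illustrative sketch added by this edit (not original source); dst is a
   placeholder pointer:
     strcat (dst, "")  ->  dst.  */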
11183 static tree
11184 fold_builtin_strcat (tree dst, tree src)
11186 if (!validate_arg (dst, POINTER_TYPE)
11187 || !validate_arg (src, POINTER_TYPE))
11188 return NULL_TREE;
11189 else
11191 const char *p = c_getstr (src);
11193 /* If the string length is zero, return the dst parameter. */
11194 if (p && *p == '\0')
11195 return dst;
11197 return NULL_TREE;
11201 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11202 arguments to the call.
11204 Return NULL_TREE if no simplification was possible, otherwise return the
11205 simplified form of the call as a tree.
11207 The simplified form may be a constant or other expression which
11208 computes the same value, but in a more efficient manner (including
11209 calls to other builtin functions).
11211 The call may contain arguments which need to be evaluated, but
11212 which are not useful to determine the result of the call. In
11213 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11214 COMPOUND_EXPR will be an argument which must be evaluated.
11215 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11216 COMPOUND_EXPR in the chain will contain the tree for the simplified
11217 form of the builtin function call. */
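/* Illustrative sketch added by this edit (not original source); dst and
   src are placeholder pointers:
     strncat (dst, src, 0)     ->  dst  (src still evaluated)
     strncat (dst, "abc", 16)  ->  strcat (dst, "abc").  */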
11219 static tree
11220 fold_builtin_strncat (tree dst, tree src, tree len)
11222 if (!validate_arg (dst, POINTER_TYPE)
11223 || !validate_arg (src, POINTER_TYPE)
11224 || !validate_arg (len, INTEGER_TYPE))
11225 return NULL_TREE;
11226 else
11228 const char *p = c_getstr (src);
11230 /* If the requested length is zero, or the src parameter string
11231 length is zero, return the dst parameter. */
11232 if (integer_zerop (len) || (p && *p == '\0'))
11233 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11235 /* If the requested len is greater than or equal to the string
11236 length, call strcat. */
11237 if (TREE_CODE (len) == INTEGER_CST && p
11238 && compare_tree_int (len, strlen (p)) >= 0)
11240 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11242 /* If the replacement _DECL isn't initialized, don't do the
11243 transformation. */
11244 if (!fn)
11245 return NULL_TREE;
11247 return build_call_expr (fn, 2, dst, src);
11249 return NULL_TREE;
11253 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11254 to the call.
11256 Return NULL_TREE if no simplification was possible, otherwise return the
11257 simplified form of the call as a tree.
11259 The simplified form may be a constant or other expression which
11260 computes the same value, but in a more efficient manner (including
11261 calls to other builtin functions).
11263 The call may contain arguments which need to be evaluated, but
11264 which are not useful to determine the result of the call. In
11265 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11266 COMPOUND_EXPR will be an argument which must be evaluated.
11267 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11268 COMPOUND_EXPR in the chain will contain the tree for the simplified
11269 form of the builtin function call. */
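/* Illustrative sketch added by this edit (not original source); s is a
   placeholder pointer:
     strspn ("aabbc", "ab")  ->  4
     strspn (s, "")          ->  0  (both arguments still evaluated).  */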
11271 static tree
11272 fold_builtin_strspn (tree s1, tree s2)
11274 if (!validate_arg (s1, POINTER_TYPE)
11275 || !validate_arg (s2, POINTER_TYPE))
11276 return NULL_TREE;
11277 else
11279 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11281 /* If both arguments are constants, evaluate at compile-time. */
11282 if (p1 && p2)
11284 const size_t r = strspn (p1, p2);
11285 return size_int (r);
11288 /* If either argument is "", the result is zero. */
11289 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11290 /* Evaluate and ignore both arguments in case either one has
11291 side-effects. */
11292 return omit_two_operands (integer_type_node, integer_zero_node,
11293 s1, s2);
11294 return NULL_TREE;
11298 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11299 to the call.
11301 Return NULL_TREE if no simplification was possible, otherwise return the
11302 simplified form of the call as a tree.
11304 The simplified form may be a constant or other expression which
11305 computes the same value, but in a more efficient manner (including
11306 calls to other builtin functions).
11308 The call may contain arguments which need to be evaluated, but
11309 which are not useful to determine the result of the call. In
11310 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11311 COMPOUND_EXPR will be an argument which must be evaluated.
11312 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11313 COMPOUND_EXPR in the chain will contain the tree for the simplified
11314 form of the builtin function call. */
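/* Illustrative sketch added by this edit (not original source); s is a
   placeholder pointer:
     strcspn ("aabbc", "c")  ->  4
     strcspn ("", s)         ->  0  (s still evaluated)
     strcspn (s, "")         ->  strlen (s).  */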
11316 static tree
11317 fold_builtin_strcspn (tree s1, tree s2)
11319 if (!validate_arg (s1, POINTER_TYPE)
11320 || !validate_arg (s2, POINTER_TYPE))
11321 return NULL_TREE;
11322 else
11324 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11326 /* If both arguments are constants, evaluate at compile-time. */
11327 if (p1 && p2)
11329 const size_t r = strcspn (p1, p2);
11330 return size_int (r);
11333 /* If the first argument is "", the result is zero. */
11334 if (p1 && *p1 == '\0')
11336 /* Evaluate and ignore argument s2 in case it has
11337 side-effects. */
11338 return omit_one_operand (integer_type_node,
11339 integer_zero_node, s2);
11342 /* If the second argument is "", return __builtin_strlen(s1). */
11343 if (p2 && *p2 == '\0')
11345 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11347 /* If the replacement _DECL isn't initialized, don't do the
11348 transformation. */
11349 if (!fn)
11350 return NULL_TREE;
11352 return build_call_expr (fn, 1, s1);
11354 return NULL_TREE;
11358 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11359 to the call. IGNORE is true if the value returned
11360 by the builtin will be ignored. UNLOCKED is true if this is
11361 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11362 the known length of the string. Return NULL_TREE if no simplification
11363 was possible. */
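/* Illustrative sketch added by this edit (not original source); f is a
   placeholder stream.  When the return value is unused, the folds below
   behave roughly as:
     fputs ("", f)     ->  call removed (f still evaluated)
     fputs ("x", f)    ->  fputc ('x', f)
     fputs ("abc", f)  ->  fwrite ("abc", 1, 3, f)  (unless optimizing
                           for size).  */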
11365 tree
11366 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11368 /* If we're using an unlocked function, assume the other unlocked
11369 functions exist explicitly. */
11370 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11371 : implicit_built_in_decls[BUILT_IN_FPUTC];
11372 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11373 : implicit_built_in_decls[BUILT_IN_FWRITE];
11375 /* If the return value is used, don't do the transformation. */
11376 if (!ignore)
11377 return NULL_TREE;
11379 /* Verify the arguments in the original call. */
11380 if (!validate_arg (arg0, POINTER_TYPE)
11381 || !validate_arg (arg1, POINTER_TYPE))
11382 return NULL_TREE;
11384 if (! len)
11385 len = c_strlen (arg0, 0);
11387 /* Get the length of the string passed to fputs. If the length
11388 can't be determined, punt. */
11389 if (!len
11390 || TREE_CODE (len) != INTEGER_CST)
11391 return NULL_TREE;
11393 switch (compare_tree_int (len, 1))
11395 case -1: /* length is 0, delete the call entirely. */
11396 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11398 case 0: /* length is 1, call fputc. */
11400 const char *p = c_getstr (arg0);
11402 if (p != NULL)
11404 if (fn_fputc)
11405 return build_call_expr (fn_fputc, 2,
11406 build_int_cst (NULL_TREE, p[0]), arg1);
11407 else
11408 return NULL_TREE;
11411 /* FALLTHROUGH */
11412 case 1: /* length is greater than 1, call fwrite. */
11414 /* If optimizing for size keep fputs. */
11415 if (optimize_size)
11416 return NULL_TREE;
11417 /* New argument list transforming fputs(string, stream) to
11418 fwrite(string, 1, len, stream). */
11419 if (fn_fwrite)
11420 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11421 else
11422 return NULL_TREE;
11424 default:
11425 gcc_unreachable ();
11427 return NULL_TREE;
11430 /* Fold the next_arg or va_start call EXP. Return true if an error
11431 was produced, false otherwise. This is done so that we don't output
11432 the error or warning twice or three times. */
11433 bool
11434 fold_builtin_next_arg (tree exp, bool va_start_p)
11436 tree fntype = TREE_TYPE (current_function_decl);
11437 int nargs = call_expr_nargs (exp);
11438 tree arg;
11440 if (TYPE_ARG_TYPES (fntype) == 0
11441 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11442 == void_type_node))
11444 error ("%<va_start%> used in function with fixed args");
11445 return true;
11448 if (va_start_p)
11450 if (va_start_p && (nargs != 2))
11452 error ("wrong number of arguments to function %<va_start%>");
11453 return true;
11455 arg = CALL_EXPR_ARG (exp, 1);
11457 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11458 when we checked the arguments and if needed issued a warning. */
11459 else
11461 if (nargs == 0)
11463 /* Evidently an out of date version of <stdarg.h>; can't validate
11464 va_start's second argument, but can still work as intended. */
11465 warning (0, "%<__builtin_next_arg%> called without an argument");
11466 return true;
11468 else if (nargs > 1)
11470 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11471 return true;
11473 arg = CALL_EXPR_ARG (exp, 0);
11476 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11477 or __builtin_next_arg (0) the first time we see it, after checking
11478 the arguments and if needed issuing a warning. */
11479 if (!integer_zerop (arg))
11481 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11483 /* Strip off all nops for the sake of the comparison. This
11484 is not quite the same as STRIP_NOPS. It does more.
11485 We must also strip off INDIRECT_EXPR for C++ reference
11486 parameters. */
11487 while (CONVERT_EXPR_P (arg)
11488 || TREE_CODE (arg) == INDIRECT_REF)
11489 arg = TREE_OPERAND (arg, 0);
11490 if (arg != last_parm)
11492 /* FIXME: Sometimes the tree optimizers hand us something other
11493 than the last argument even though the user used the last
11494 argument. We just warn and set the arg to be the last
11495 argument so that we will not get wrong-code because of
11496 it. */
11497 warning (0, "second parameter of %<va_start%> not last named argument");
11499 /* We want to verify the second parameter just once before the tree
11500 optimizers are run and then avoid keeping it in the tree,
11501 as otherwise we could warn even for correct code like:
11502 void foo (int i, ...)
11503 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11504 if (va_start_p)
11505 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11506 else
11507 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11509 return false;
11513 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11514 ORIG may be null if this is a 2-argument call. We don't attempt to
11515 simplify calls with more than 3 arguments.
11517 Return NULL_TREE if no simplification was possible, otherwise return the
11518 simplified form of the call as a tree. If IGNORED is true, it means that
11519 the caller does not use the returned value of the function. */
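/* Illustrative sketch added by this edit (not original source); buf and
   str are placeholder pointers:
     sprintf (buf, "hello")    ->  strcpy (buf, "hello")  [value 5 if used]
     sprintf (buf, "%s", str)  ->  strcpy (buf, str)  (when the result is
                                   unused or str has a known constant
                                   length).  */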
11521 static tree
11522 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11524 tree call, retval;
11525 const char *fmt_str = NULL;
11527 /* Verify the required arguments in the original call. We deal with two
11528 types of sprintf() calls: 'sprintf (str, fmt)' and
11529 'sprintf (dest, "%s", orig)'. */
11530 if (!validate_arg (dest, POINTER_TYPE)
11531 || !validate_arg (fmt, POINTER_TYPE))
11532 return NULL_TREE;
11533 if (orig && !validate_arg (orig, POINTER_TYPE))
11534 return NULL_TREE;
11536 /* Check whether the format is a literal string constant. */
11537 fmt_str = c_getstr (fmt);
11538 if (fmt_str == NULL)
11539 return NULL_TREE;
11541 call = NULL_TREE;
11542 retval = NULL_TREE;
11544 if (!init_target_chars ())
11545 return NULL_TREE;
11547 /* If the format doesn't contain % args or %%, use strcpy. */
11548 if (strchr (fmt_str, target_percent) == NULL)
11550 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11552 if (!fn)
11553 return NULL_TREE;
11555 /* Don't optimize sprintf (buf, "abc", ptr++). */
11556 if (orig)
11557 return NULL_TREE;
11559 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11560 'format' is known to contain no % formats. */
11561 call = build_call_expr (fn, 2, dest, fmt);
11562 if (!ignored)
11563 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11566 /* If the format is "%s", use strcpy if the result isn't used. */
11567 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11569 tree fn;
11570 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11572 if (!fn)
11573 return NULL_TREE;
11575 /* Don't crash on sprintf (str1, "%s"). */
11576 if (!orig)
11577 return NULL_TREE;
11579 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11580 if (!ignored)
11582 retval = c_strlen (orig, 1);
11583 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11584 return NULL_TREE;
11586 call = build_call_expr (fn, 2, dest, orig);
11589 if (call && retval)
11591 retval = fold_convert
11592 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11593 retval);
11594 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11596 else
11597 return call;
11600 /* Expand a call EXP to __builtin_object_size. */
11602 static rtx
11603 expand_builtin_object_size (tree exp)
11605 tree ost;
11606 int object_size_type;
11607 tree fndecl = get_callee_fndecl (exp);
11609 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11611 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11612 exp, fndecl);
11613 expand_builtin_trap ();
11614 return const0_rtx;
11617 ost = CALL_EXPR_ARG (exp, 1);
11618 STRIP_NOPS (ost);
11620 if (TREE_CODE (ost) != INTEGER_CST
11621 || tree_int_cst_sgn (ost) < 0
11622 || compare_tree_int (ost, 3) > 0)
11624 error ("%Klast argument of %D is not integer constant between 0 and 3",
11625 exp, fndecl);
11626 expand_builtin_trap ();
11627 return const0_rtx;
11630 object_size_type = tree_low_cst (ost, 0);
11632 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11635 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11636 FCODE is the BUILT_IN_* to use.
11637 Return NULL_RTX if we failed; the caller should emit a normal call,
11638 otherwise try to get the result in TARGET, if convenient (and in
11639 mode MODE if that's convenient). */
11641 static rtx
11642 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11643 enum built_in_function fcode)
11645 tree dest, src, len, size;
11647 if (!validate_arglist (exp,
11648 POINTER_TYPE,
11649 fcode == BUILT_IN_MEMSET_CHK
11650 ? INTEGER_TYPE : POINTER_TYPE,
11651 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11652 return NULL_RTX;
11654 dest = CALL_EXPR_ARG (exp, 0);
11655 src = CALL_EXPR_ARG (exp, 1);
11656 len = CALL_EXPR_ARG (exp, 2);
11657 size = CALL_EXPR_ARG (exp, 3);
11659 if (! host_integerp (size, 1))
11660 return NULL_RTX;
11662 if (host_integerp (len, 1) || integer_all_onesp (size))
11664 tree fn;
11666 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11668 warning (0, "%Kcall to %D will always overflow destination buffer",
11669 exp, get_callee_fndecl (exp));
11670 return NULL_RTX;
11673 fn = NULL_TREE;
11674 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11675 mem{cpy,pcpy,move,set} is available. */
11676 switch (fcode)
11678 case BUILT_IN_MEMCPY_CHK:
11679 fn = built_in_decls[BUILT_IN_MEMCPY];
11680 break;
11681 case BUILT_IN_MEMPCPY_CHK:
11682 fn = built_in_decls[BUILT_IN_MEMPCPY];
11683 break;
11684 case BUILT_IN_MEMMOVE_CHK:
11685 fn = built_in_decls[BUILT_IN_MEMMOVE];
11686 break;
11687 case BUILT_IN_MEMSET_CHK:
11688 fn = built_in_decls[BUILT_IN_MEMSET];
11689 break;
11690 default:
11691 break;
11694 if (! fn)
11695 return NULL_RTX;
11697 fn = build_call_expr (fn, 3, dest, src, len);
11698 STRIP_TYPE_NOPS (fn);
11699 while (TREE_CODE (fn) == COMPOUND_EXPR)
11701 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11702 EXPAND_NORMAL);
11703 fn = TREE_OPERAND (fn, 1);
11705 if (TREE_CODE (fn) == CALL_EXPR)
11706 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11707 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11709 else if (fcode == BUILT_IN_MEMSET_CHK)
11710 return NULL_RTX;
11711 else
11713 unsigned int dest_align
11714 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11716 /* If DEST is not a pointer type, call the normal function. */
11717 if (dest_align == 0)
11718 return NULL_RTX;
11720 /* If SRC and DEST are the same (and not volatile), do nothing. */
11721 if (operand_equal_p (src, dest, 0))
11723 tree expr;
11725 if (fcode != BUILT_IN_MEMPCPY_CHK)
11727 /* Evaluate and ignore LEN in case it has side-effects. */
11728 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11729 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11732 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11733 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11736 /* __memmove_chk special case. */
11737 if (fcode == BUILT_IN_MEMMOVE_CHK)
11739 unsigned int src_align
11740 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11742 if (src_align == 0)
11743 return NULL_RTX;
11745 /* If src is categorized for a readonly section we can use
11746 normal __memcpy_chk. */
11747 if (readonly_data_expr (src))
11749 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11750 if (!fn)
11751 return NULL_RTX;
11752 fn = build_call_expr (fn, 4, dest, src, len, size);
11753 STRIP_TYPE_NOPS (fn);
11754 while (TREE_CODE (fn) == COMPOUND_EXPR)
11756 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11757 EXPAND_NORMAL);
11758 fn = TREE_OPERAND (fn, 1);
11760 if (TREE_CODE (fn) == CALL_EXPR)
11761 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11762 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11765 return NULL_RTX;
11769 /* Emit warning if a buffer overflow is detected at compile time. */
11771 static void
11772 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11774 int is_strlen = 0;
11775 tree len, size;
11777 switch (fcode)
11779 case BUILT_IN_STRCPY_CHK:
11780 case BUILT_IN_STPCPY_CHK:
11781 /* For __strcat_chk the warning will be emitted only if overflowing
11782 by at least strlen (dest) + 1 bytes. */
11783 case BUILT_IN_STRCAT_CHK:
11784 len = CALL_EXPR_ARG (exp, 1);
11785 size = CALL_EXPR_ARG (exp, 2);
11786 is_strlen = 1;
11787 break;
11788 case BUILT_IN_STRNCAT_CHK:
11789 case BUILT_IN_STRNCPY_CHK:
11790 len = CALL_EXPR_ARG (exp, 2);
11791 size = CALL_EXPR_ARG (exp, 3);
11792 break;
11793 case BUILT_IN_SNPRINTF_CHK:
11794 case BUILT_IN_VSNPRINTF_CHK:
11795 len = CALL_EXPR_ARG (exp, 1);
11796 size = CALL_EXPR_ARG (exp, 3);
11797 break;
11798 default:
11799 gcc_unreachable ();
11802 if (!len || !size)
11803 return;
11805 if (! host_integerp (size, 1) || integer_all_onesp (size))
11806 return;
11808 if (is_strlen)
11810 len = c_strlen (len, 1);
11811 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11812 return;
11814 else if (fcode == BUILT_IN_STRNCAT_CHK)
11816 tree src = CALL_EXPR_ARG (exp, 1);
11817 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11818 return;
11819 src = c_strlen (src, 1);
11820 if (! src || ! host_integerp (src, 1))
11822 warning (0, "%Kcall to %D might overflow destination buffer",
11823 exp, get_callee_fndecl (exp));
11824 return;
11826 else if (tree_int_cst_lt (src, size))
11827 return;
11829 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11830 return;
11832 warning (0, "%Kcall to %D will always overflow destination buffer",
11833 exp, get_callee_fndecl (exp));
11836 /* Emit warning if a buffer overflow is detected at compile time
11837 in __sprintf_chk/__vsprintf_chk calls. */
11839 static void
11840 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11842 tree dest, size, len, fmt, flag;
11843 const char *fmt_str;
11844 int nargs = call_expr_nargs (exp);
11846 /* Verify the required arguments in the original call. */
11848 if (nargs < 4)
11849 return;
11850 dest = CALL_EXPR_ARG (exp, 0);
11851 flag = CALL_EXPR_ARG (exp, 1);
11852 size = CALL_EXPR_ARG (exp, 2);
11853 fmt = CALL_EXPR_ARG (exp, 3);
11855 if (! host_integerp (size, 1) || integer_all_onesp (size))
11856 return;
11858 /* Check whether the format is a literal string constant. */
11859 fmt_str = c_getstr (fmt);
11860 if (fmt_str == NULL)
11861 return;
11863 if (!init_target_chars ())
11864 return;
11866 /* If the format doesn't contain % args or %%, we know its size. */
11867 if (strchr (fmt_str, target_percent) == 0)
11868 len = build_int_cstu (size_type_node, strlen (fmt_str));
11869 /* If the format is "%s" and first ... argument is a string literal,
11870 we know it too. */
11871 else if (fcode == BUILT_IN_SPRINTF_CHK
11872 && strcmp (fmt_str, target_percent_s) == 0)
11874 tree arg;
11876 if (nargs < 5)
11877 return;
11878 arg = CALL_EXPR_ARG (exp, 4);
11879 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11880 return;
11882 len = c_strlen (arg, 1);
11883 if (!len || ! host_integerp (len, 1))
11884 return;
11886 else
11887 return;
11889 if (! tree_int_cst_lt (len, size))
11891 warning (0, "%Kcall to %D will always overflow destination buffer",
11892 exp, get_callee_fndecl (exp));
11896 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11897 if possible. */
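/* Illustrative sketch added by this edit (not original source): for a
   hypothetical "char buf[10];" in scope,
     __builtin_object_size (buf, 0)  ->  (size_t) 10
   while the builtin's fallback values, (size_t) -1 for types 0 and 1 and
   (size_t) 0 for types 2 and 3, are used below when PTR has
   side-effects.  */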
11899 tree
11900 fold_builtin_object_size (tree ptr, tree ost)
11902 tree ret = NULL_TREE;
11903 int object_size_type;
11905 if (!validate_arg (ptr, POINTER_TYPE)
11906 || !validate_arg (ost, INTEGER_TYPE))
11907 return NULL_TREE;
11909 STRIP_NOPS (ost);
11911 if (TREE_CODE (ost) != INTEGER_CST
11912 || tree_int_cst_sgn (ost) < 0
11913 || compare_tree_int (ost, 3) > 0)
11914 return NULL_TREE;
11916 object_size_type = tree_low_cst (ost, 0);
11918 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11919 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11920 and (size_t) 0 for types 2 and 3. */
11921 if (TREE_SIDE_EFFECTS (ptr))
11922 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11924 if (TREE_CODE (ptr) == ADDR_EXPR)
11925 ret = build_int_cstu (size_type_node,
11926 compute_builtin_object_size (ptr, object_size_type));
11928 else if (TREE_CODE (ptr) == SSA_NAME)
11930 unsigned HOST_WIDE_INT bytes;
11932 /* If object size is not known yet, delay folding until
11933 later. Maybe subsequent passes will help determining
11934 it. */
11935 bytes = compute_builtin_object_size (ptr, object_size_type);
11936 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11937 ? -1 : 0))
11938 ret = build_int_cstu (size_type_node, bytes);
11941 if (ret)
11943 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11944 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11945 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11946 ret = NULL_TREE;
11949 return ret;
11952 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11953 DEST, SRC, LEN, and SIZE are the arguments to the call.
11954 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11955 code of the builtin. If MAXLEN is not NULL, it is maximum length
11956 passed as third argument. */
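/* Illustrative sketch added by this edit (not original source); d, s and
   n are placeholders:
     __memcpy_chk (d, s, 16, 32)          ->  memcpy (d, s, 16)
     __memcpy_chk (d, s, n, (size_t) -1)  ->  memcpy (d, s, n).  */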
11958 tree
11959 fold_builtin_memory_chk (tree fndecl,
11960 tree dest, tree src, tree len, tree size,
11961 tree maxlen, bool ignore,
11962 enum built_in_function fcode)
11964 tree fn;
11966 if (!validate_arg (dest, POINTER_TYPE)
11967 || !validate_arg (src,
11968 (fcode == BUILT_IN_MEMSET_CHK
11969 ? INTEGER_TYPE : POINTER_TYPE))
11970 || !validate_arg (len, INTEGER_TYPE)
11971 || !validate_arg (size, INTEGER_TYPE))
11972 return NULL_TREE;
11974 /* If SRC and DEST are the same (and not volatile), return DEST
11975 (resp. DEST+LEN for __mempcpy_chk). */
11976 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11978 if (fcode != BUILT_IN_MEMPCPY_CHK)
11979 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11980 else
11982 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11983 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11987 if (! host_integerp (size, 1))
11988 return NULL_TREE;
11990 if (! integer_all_onesp (size))
11992 if (! host_integerp (len, 1))
11994 /* If LEN is not constant, try MAXLEN too.
11995 For MAXLEN only allow optimizing into non-_ocs function
11996 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11997 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11999 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12001 /* (void) __mempcpy_chk () can be optimized into
12002 (void) __memcpy_chk (). */
12003 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12004 if (!fn)
12005 return NULL_TREE;
12007 return build_call_expr (fn, 4, dest, src, len, size);
12009 return NULL_TREE;
12012 else
12013 maxlen = len;
12015 if (tree_int_cst_lt (size, maxlen))
12016 return NULL_TREE;
12019 fn = NULL_TREE;
12020 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12021 mem{cpy,pcpy,move,set} is available. */
12022 switch (fcode)
12024 case BUILT_IN_MEMCPY_CHK:
12025 fn = built_in_decls[BUILT_IN_MEMCPY];
12026 break;
12027 case BUILT_IN_MEMPCPY_CHK:
12028 fn = built_in_decls[BUILT_IN_MEMPCPY];
12029 break;
12030 case BUILT_IN_MEMMOVE_CHK:
12031 fn = built_in_decls[BUILT_IN_MEMMOVE];
12032 break;
12033 case BUILT_IN_MEMSET_CHK:
12034 fn = built_in_decls[BUILT_IN_MEMSET];
12035 break;
12036 default:
12037 break;
12040 if (!fn)
12041 return NULL_TREE;
12043 return build_call_expr (fn, 3, dest, src, len);
12046 /* Fold a call to the __st[rp]cpy_chk builtin.
12047 DEST, SRC, and SIZE are the arguments to the call.
12048 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12049 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12050 strings passed as second argument. */
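/* Illustrative sketch added by this edit (not original source); d and s
   are placeholder pointers:
     __strcpy_chk (d, "abc", 32)       ->  strcpy (d, "abc")
     __strcpy_chk (d, s, (size_t) -1)  ->  strcpy (d, s).  */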
12052 tree
12053 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12054 tree maxlen, bool ignore,
12055 enum built_in_function fcode)
12057 tree len, fn;
12059 if (!validate_arg (dest, POINTER_TYPE)
12060 || !validate_arg (src, POINTER_TYPE)
12061 || !validate_arg (size, INTEGER_TYPE))
12062 return NULL_TREE;
12064 /* If SRC and DEST are the same (and not volatile), return DEST. */
12065 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12066 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12068 if (! host_integerp (size, 1))
12069 return NULL_TREE;
12071 if (! integer_all_onesp (size))
12073 len = c_strlen (src, 1);
12074 if (! len || ! host_integerp (len, 1))
12076 /* If LEN is not constant, try MAXLEN too.
12077 For MAXLEN only allow optimizing into non-_ocs function
12078 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12079 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12081 if (fcode == BUILT_IN_STPCPY_CHK)
12083 if (! ignore)
12084 return NULL_TREE;
12086 /* If return value of __stpcpy_chk is ignored,
12087 optimize into __strcpy_chk. */
12088 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12089 if (!fn)
12090 return NULL_TREE;
12092 return build_call_expr (fn, 3, dest, src, size);
12095 if (! len || TREE_SIDE_EFFECTS (len))
12096 return NULL_TREE;
12098 /* If c_strlen returned something, but not a constant,
12099 transform __strcpy_chk into __memcpy_chk. */
12100 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12101 if (!fn)
12102 return NULL_TREE;
12104 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12105 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12106 build_call_expr (fn, 4,
12107 dest, src, len, size));
12110 else
12111 maxlen = len;
12113 if (! tree_int_cst_lt (maxlen, size))
12114 return NULL_TREE;
12117 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12118 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12119 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12120 if (!fn)
12121 return NULL_TREE;
12123 return build_call_expr (fn, 2, dest, src);
12126 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12127 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12128 length passed as third argument. */
12130 tree
12131 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12132 tree maxlen)
12134 tree fn;
12136 if (!validate_arg (dest, POINTER_TYPE)
12137 || !validate_arg (src, POINTER_TYPE)
12138 || !validate_arg (len, INTEGER_TYPE)
12139 || !validate_arg (size, INTEGER_TYPE))
12140 return NULL_TREE;
12142 if (! host_integerp (size, 1))
12143 return NULL_TREE;
12145 if (! integer_all_onesp (size))
12147 if (! host_integerp (len, 1))
12149 /* If LEN is not constant, try MAXLEN too.
12150 For MAXLEN only allow optimizing into non-_ocs function
12151 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12152 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12153 return NULL_TREE;
12155 else
12156 maxlen = len;
12158 if (tree_int_cst_lt (size, maxlen))
12159 return NULL_TREE;
12162 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12163 fn = built_in_decls[BUILT_IN_STRNCPY];
12164 if (!fn)
12165 return NULL_TREE;
12167 return build_call_expr (fn, 3, dest, src, len);
12170 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12171 are the arguments to the call. */
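/* Illustrative sketch added by this edit (not original source); d and s
   are placeholder pointers, os a placeholder object size:
     __strcat_chk (d, "", os)          ->  d
     __strcat_chk (d, s, (size_t) -1)  ->  strcat (d, s).  */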
12173 static tree
12174 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12176 tree fn;
12177 const char *p;
12179 if (!validate_arg (dest, POINTER_TYPE)
12180 || !validate_arg (src, POINTER_TYPE)
12181 || !validate_arg (size, INTEGER_TYPE))
12182 return NULL_TREE;
12184 p = c_getstr (src);
12185 /* If the SRC parameter is "", return DEST. */
12186 if (p && *p == '\0')
12187 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12189 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12190 return NULL_TREE;
12192 /* If __builtin_strcat_chk is used, assume strcat is available. */
12193 fn = built_in_decls[BUILT_IN_STRCAT];
12194 if (!fn)
12195 return NULL_TREE;
12197 return build_call_expr (fn, 2, dest, src);
12200 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12201 LEN, and SIZE. */
12203 static tree
12204 fold_builtin_strncat_chk (tree fndecl,
12205 tree dest, tree src, tree len, tree size)
12207 tree fn;
12208 const char *p;
12210 if (!validate_arg (dest, POINTER_TYPE)
12211 || !validate_arg (src, POINTER_TYPE)
12212 || !validate_arg (len, INTEGER_TYPE)
12213 || !validate_arg (size, INTEGER_TYPE))
12214 return NULL_TREE;
12216 p = c_getstr (src);
12217 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12218 if (p && *p == '\0')
12219 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12220 else if (integer_zerop (len))
12221 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12223 if (! host_integerp (size, 1))
12224 return NULL_TREE;
12226 if (! integer_all_onesp (size))
12228 tree src_len = c_strlen (src, 1);
12229 if (src_len
12230 && host_integerp (src_len, 1)
12231 && host_integerp (len, 1)
12232 && ! tree_int_cst_lt (len, src_len))
12234 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12235 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12236 if (!fn)
12237 return NULL_TREE;
12239 return build_call_expr (fn, 3, dest, src, size);
12241 return NULL_TREE;
12244 /* If __builtin_strncat_chk is used, assume strncat is available. */
12245 fn = built_in_decls[BUILT_IN_STRNCAT];
12246 if (!fn)
12247 return NULL_TREE;
12249 return build_call_expr (fn, 3, dest, src, len);
12252 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12253 a normal call should be emitted rather than expanding the function
12254 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
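/* Illustrative sketch added by this edit (not original source); d and i
   are placeholders:
     __sprintf_chk (d, 0, (size_t) -1, "%d", i)  ->  sprintf (d, "%d", i).  */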
12256 static tree
12257 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12259 tree dest, size, len, fn, fmt, flag;
12260 const char *fmt_str;
12261 int nargs = call_expr_nargs (exp);
12263 /* Verify the required arguments in the original call. */
12264 if (nargs < 4)
12265 return NULL_TREE;
12266 dest = CALL_EXPR_ARG (exp, 0);
12267 if (!validate_arg (dest, POINTER_TYPE))
12268 return NULL_TREE;
12269 flag = CALL_EXPR_ARG (exp, 1);
12270 if (!validate_arg (flag, INTEGER_TYPE))
12271 return NULL_TREE;
12272 size = CALL_EXPR_ARG (exp, 2);
12273 if (!validate_arg (size, INTEGER_TYPE))
12274 return NULL_TREE;
12275 fmt = CALL_EXPR_ARG (exp, 3);
12276 if (!validate_arg (fmt, POINTER_TYPE))
12277 return NULL_TREE;
12279 if (! host_integerp (size, 1))
12280 return NULL_TREE;
12282 len = NULL_TREE;
12284 if (!init_target_chars ())
12285 return NULL_TREE;
12287 /* Check whether the format is a literal string constant. */
12288 fmt_str = c_getstr (fmt);
12289 if (fmt_str != NULL)
12291 /* If the format doesn't contain % args or %%, we know the size. */
12292 if (strchr (fmt_str, target_percent) == 0)
12294 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12295 len = build_int_cstu (size_type_node, strlen (fmt_str));
12297 /* If the format is "%s" and first ... argument is a string literal,
12298 we know the size too. */
12299 else if (fcode == BUILT_IN_SPRINTF_CHK
12300 && strcmp (fmt_str, target_percent_s) == 0)
12302 tree arg;
12304 if (nargs == 5)
12306 arg = CALL_EXPR_ARG (exp, 4);
12307 if (validate_arg (arg, POINTER_TYPE))
12309 len = c_strlen (arg, 1);
12310 if (! len || ! host_integerp (len, 1))
12311 len = NULL_TREE;
12317 if (! integer_all_onesp (size))
12319 if (! len || ! tree_int_cst_lt (len, size))
12320 return NULL_TREE;
12323 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12324 or if format doesn't contain % chars or is "%s". */
12325 if (! integer_zerop (flag))
12327 if (fmt_str == NULL)
12328 return NULL_TREE;
12329 if (strchr (fmt_str, target_percent) != NULL
12330 && strcmp (fmt_str, target_percent_s))
12331 return NULL_TREE;
12334 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12335 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12336 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12337 if (!fn)
12338 return NULL_TREE;
12340 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12343 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
12344 a normal call should be emitted rather than expanding the function
12345 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12346 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12347 passed as second argument. */
12349 tree
12350 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12351 enum built_in_function fcode)
12353 tree dest, size, len, fn, fmt, flag;
12354 const char *fmt_str;
12356 /* Verify the required arguments in the original call. */
12357 if (call_expr_nargs (exp) < 5)
12358 return NULL_TREE;
12359 dest = CALL_EXPR_ARG (exp, 0);
12360 if (!validate_arg (dest, POINTER_TYPE))
12361 return NULL_TREE;
12362 len = CALL_EXPR_ARG (exp, 1);
12363 if (!validate_arg (len, INTEGER_TYPE))
12364 return NULL_TREE;
12365 flag = CALL_EXPR_ARG (exp, 2);
12366 if (!validate_arg (flag, INTEGER_TYPE))
12367 return NULL_TREE;
12368 size = CALL_EXPR_ARG (exp, 3);
12369 if (!validate_arg (size, INTEGER_TYPE))
12370 return NULL_TREE;
12371 fmt = CALL_EXPR_ARG (exp, 4);
12372 if (!validate_arg (fmt, POINTER_TYPE))
12373 return NULL_TREE;
12375 if (! host_integerp (size, 1))
12376 return NULL_TREE;
12378 if (! integer_all_onesp (size))
12380 if (! host_integerp (len, 1))
12382 /* If LEN is not constant, try MAXLEN too.
12383 For MAXLEN only allow optimizing into non-_ocs function
12384 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12385 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12386 return NULL_TREE;
12388 else
12389 maxlen = len;
12391 if (tree_int_cst_lt (size, maxlen))
12392 return NULL_TREE;
12395 if (!init_target_chars ())
12396 return NULL_TREE;
12398 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12399 or if format doesn't contain % chars or is "%s". */
12400 if (! integer_zerop (flag))
12402 fmt_str = c_getstr (fmt);
12403 if (fmt_str == NULL)
12404 return NULL_TREE;
12405 if (strchr (fmt_str, target_percent) != NULL
12406 && strcmp (fmt_str, target_percent_s))
12407 return NULL_TREE;
12410 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12411 available. */
12412 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12413 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12414 if (!fn)
12415 return NULL_TREE;
12417 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12420 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12421 FMT and ARG are the arguments to the call; we don't fold cases with
12422 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12424 Return NULL_TREE if no simplification was possible, otherwise return the
12425 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12426 code of the function to be simplified. */
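/* Illustrative sketch added by this edit (not original source); s and c
   are placeholders.  When the return value is unused:
     printf ("")         ->  call removed
     printf ("x")        ->  putchar ('x')
     printf ("abc\n")    ->  puts ("abc")
     printf ("%s\n", s)  ->  puts (s)
     printf ("%c", c)    ->  putchar (c).  */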
12428 static tree
12429 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12430 enum built_in_function fcode)
12432 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12433 const char *fmt_str = NULL;
12435 /* If the return value is used, don't do the transformation. */
12436 if (! ignore)
12437 return NULL_TREE;
12439 /* Verify the required arguments in the original call. */
12440 if (!validate_arg (fmt, POINTER_TYPE))
12441 return NULL_TREE;
12443 /* Check whether the format is a literal string constant. */
12444 fmt_str = c_getstr (fmt);
12445 if (fmt_str == NULL)
12446 return NULL_TREE;
12448 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12450 /* If we're using an unlocked function, assume the other
12451 unlocked functions exist explicitly. */
12452 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12453 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12455 else
12457 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12458 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12461 if (!init_target_chars ())
12462 return NULL_TREE;
12464 if (strcmp (fmt_str, target_percent_s) == 0
12465 || strchr (fmt_str, target_percent) == NULL)
12467 const char *str;
12469 if (strcmp (fmt_str, target_percent_s) == 0)
12471 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12472 return NULL_TREE;
12474 if (!arg || !validate_arg (arg, POINTER_TYPE))
12475 return NULL_TREE;
12477 str = c_getstr (arg);
12478 if (str == NULL)
12479 return NULL_TREE;
12481 else
12483 /* The format specifier doesn't contain any '%' characters. */
12484 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12485 && arg)
12486 return NULL_TREE;
12487 str = fmt_str;
12490 /* If the string was "", printf does nothing. */
12491 if (str[0] == '\0')
12492 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12494 /* If the string has length of 1, call putchar. */
12495 if (str[1] == '\0')
12497 /* Given printf ("c"), where c is any single character,
12498 convert "c"[0] to an int and pass that to the replacement
12499 function. */
12500 newarg = build_int_cst (NULL_TREE, str[0]);
12501 if (fn_putchar)
12502 call = build_call_expr (fn_putchar, 1, newarg);
12504 else
12506 /* If the string was "string\n", call puts("string"). */
12507 size_t len = strlen (str);
12508 if ((unsigned char)str[len - 1] == target_newline)
12510 /* Create a NUL-terminated string that's one char shorter
12511 than the original, stripping off the trailing '\n'. */
12512 char *newstr = XALLOCAVEC (char, len);
12513 memcpy (newstr, str, len - 1);
12514 newstr[len - 1] = 0;
12516 newarg = build_string_literal (len, newstr);
12517 if (fn_puts)
12518 call = build_call_expr (fn_puts, 1, newarg);
12520 else
12521 /* We'd like to arrange to call fputs(string,stdout) here,
12522 but we need stdout and don't have a way to get it yet. */
12523 return NULL_TREE;
12527 /* The other optimizations can be done only on the non-va_list variants. */
12528 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12529 return NULL_TREE;
12531 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12532 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12534 if (!arg || !validate_arg (arg, POINTER_TYPE))
12535 return NULL_TREE;
12536 if (fn_puts)
12537 call = build_call_expr (fn_puts, 1, arg);
12540 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12541 else if (strcmp (fmt_str, target_percent_c) == 0)
12543 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12544 return NULL_TREE;
12545 if (fn_putchar)
12546 call = build_call_expr (fn_putchar, 1, arg);
12549 if (!call)
12550 return NULL_TREE;
12552 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12555 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12556 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12557 more than 3 arguments, and ARG may be null in the 2-argument case.
12559 Return NULL_TREE if no simplification was possible, otherwise return the
12560 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12561 code of the function to be simplified. */
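/* Illustrative sketch added by this edit (not original source); f, s and
   c are placeholders.  When the return value is unused:
     fprintf (f, "abc")    ->  fputs ("abc", f)
     fprintf (f, "%s", s)  ->  fputs (s, f)
     fprintf (f, "%c", c)  ->  fputc (c, f).  */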
12563 static tree
12564 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12565 enum built_in_function fcode)
12567 tree fn_fputc, fn_fputs, call = NULL_TREE;
12568 const char *fmt_str = NULL;
12570 /* If the return value is used, don't do the transformation. */
12571 if (! ignore)
12572 return NULL_TREE;
12574 /* Verify the required arguments in the original call. */
12575 if (!validate_arg (fp, POINTER_TYPE))
12576 return NULL_TREE;
12577 if (!validate_arg (fmt, POINTER_TYPE))
12578 return NULL_TREE;
12580 /* Check whether the format is a literal string constant. */
12581 fmt_str = c_getstr (fmt);
12582 if (fmt_str == NULL)
12583 return NULL_TREE;
12585 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12587 /* If we're using an unlocked function, assume the other
12588 unlocked functions exist explicitly. */
12589 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12590 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12592 else
12594 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12595 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12598 if (!init_target_chars ())
12599 return NULL_TREE;
12601 /* If the format doesn't contain % args or %%, use fputs. */
12602 if (strchr (fmt_str, target_percent) == NULL)
12604 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12605 && arg)
12606 return NULL_TREE;
12608 /* If the format specifier was "", fprintf does nothing. */
12609 if (fmt_str[0] == '\0')
12611 /* If FP has side-effects, just wait until gimplification is
12612 done. */
12613 if (TREE_SIDE_EFFECTS (fp))
12614 return NULL_TREE;
12616 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12619 /* When "string" doesn't contain %, replace all cases of
12620 fprintf (fp, string) with fputs (string, fp). The fputs
12621 builtin will take care of special cases like length == 1. */
12622 if (fn_fputs)
12623 call = build_call_expr (fn_fputs, 2, fmt, fp);
12626 /* The other optimizations can be done only on the non-va_list variants. */
12627 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12628 return NULL_TREE;
12630 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12631 else if (strcmp (fmt_str, target_percent_s) == 0)
12633 if (!arg || !validate_arg (arg, POINTER_TYPE))
12634 return NULL_TREE;
12635 if (fn_fputs)
12636 call = build_call_expr (fn_fputs, 2, arg, fp);
12639 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12640 else if (strcmp (fmt_str, target_percent_c) == 0)
12642 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12643 return NULL_TREE;
12644 if (fn_fputc)
12645 call = build_call_expr (fn_fputc, 2, arg, fp);
12648 if (!call)
12649 return NULL_TREE;
12650 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12653 /* Initialize format string characters in the target charset. */
12655 static bool
12656 init_target_chars (void)
12658 static bool init;
12659 if (!init)
12661 target_newline = lang_hooks.to_target_charset ('\n');
12662 target_percent = lang_hooks.to_target_charset ('%');
12663 target_c = lang_hooks.to_target_charset ('c');
12664 target_s = lang_hooks.to_target_charset ('s');
12665 if (target_newline == 0 || target_percent == 0 || target_c == 0
12666 || target_s == 0)
12667 return false;
12669 target_percent_c[0] = target_percent;
12670 target_percent_c[1] = target_c;
12671 target_percent_c[2] = '\0';
12673 target_percent_s[0] = target_percent;
12674 target_percent_s[1] = target_s;
12675 target_percent_s[2] = '\0';
12677 target_percent_s_newline[0] = target_percent;
12678 target_percent_s_newline[1] = target_s;
12679 target_percent_s_newline[2] = target_newline;
12680 target_percent_s_newline[3] = '\0';
12682 init = true;
12684 return true;
12687 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12688 and no overflow/underflow occurred. INEXACT is true if M was not
12689 exactly calculated. TYPE is the tree type for the result. This
12690 function assumes that you cleared the MPFR flags and then
12691 calculated M to see if anything subsequently set a flag prior to
12692 entering this function. Return NULL_TREE if any checks fail. */
12694 static tree
12695 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12697 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12698 overflow/underflow occurred. If -frounding-math, proceed iff the
12699 result of calling FUNC was exact. */
12700 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12701 && (!flag_rounding_math || !inexact))
12703 REAL_VALUE_TYPE rr;
12705 real_from_mpfr (&rr, m, type, GMP_RNDN);
12706 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12707 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12708 but the mpfr_t is not, then we underflowed in the
12709 conversion. */
12710 if (real_isfinite (&rr)
12711 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12713 REAL_VALUE_TYPE rmode;
12715 real_convert (&rmode, TYPE_MODE (type), &rr);
12716 /* Proceed iff the specified mode can hold the value. */
12717 if (real_identical (&rmode, &rr))
12718 return build_real (type, rmode);
12721 return NULL_TREE;
12724 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12725 FUNC on it and return the resulting value as a tree with type TYPE.
12726 If MIN and/or MAX are not NULL, then the supplied ARG must be
12727 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12728 acceptable values, otherwise they are not. The mpfr precision is
12729 set to the precision of TYPE. We assume that function FUNC returns
12730 zero if the result could be calculated exactly within the requested
12731 precision. */
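/* Illustrative sketch added by this edit (not original source): through
   its fold_builtin_* caller, a constant call such as sin (1.0) reaches
   this routine, is evaluated with MPFR at the precision of TYPE, and is
   replaced by the resulting REAL_CST when do_mpfr_ckconv accepts it.  */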
12733 static tree
12734 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12735 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12736 bool inclusive)
12738 tree result = NULL_TREE;
12740 STRIP_NOPS (arg);
12742 /* To proceed, MPFR must exactly represent the target floating point
12743 format, which only happens when the target base equals two. */
12744 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12745 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12747 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12749 if (real_isfinite (ra)
12750 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12751 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12753 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12754 int inexact;
12755 mpfr_t m;
12757 mpfr_init2 (m, prec);
12758 mpfr_from_real (m, ra, GMP_RNDN);
12759 mpfr_clear_flags ();
12760 inexact = func (m, m, GMP_RNDN);
12761 result = do_mpfr_ckconv (m, type, inexact);
12762 mpfr_clear (m);
12766 return result;
12769 /* If arguments ARG1 and ARG2 are REAL_CSTs, call the two-argument mpfr
12770 function FUNC on them and return the resulting value as a tree with type TYPE.
12771 The mpfr precision is set to the precision of TYPE. We assume that
12772 function FUNC returns zero if the result could be calculated
12773 exactly within the requested precision. */
12775 static tree
12776 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12777 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12779 tree result = NULL_TREE;
12781 STRIP_NOPS (arg1);
12782 STRIP_NOPS (arg2);
12784 /* To proceed, MPFR must exactly represent the target floating point
12785 format, which only happens when the target base equals two. */
12786 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12787 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12788 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12790 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12791 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12793 if (real_isfinite (ra1) && real_isfinite (ra2))
12795 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12796 int inexact;
12797 mpfr_t m1, m2;
12799 mpfr_inits2 (prec, m1, m2, NULL);
12800 mpfr_from_real (m1, ra1, GMP_RNDN);
12801 mpfr_from_real (m2, ra2, GMP_RNDN);
12802 mpfr_clear_flags ();
12803 inexact = func (m1, m1, m2, GMP_RNDN);
12804 result = do_mpfr_ckconv (m1, type, inexact);
12805 mpfr_clears (m1, m2, NULL);
12809 return result;
12812 /* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the three-argument
12813 mpfr function FUNC on them and return the resulting value as a tree with type TYPE.
12814 The mpfr precision is set to the precision of TYPE. We assume that
12815 function FUNC returns zero if the result could be calculated
12816 exactly within the requested precision. */
12818 static tree
12819 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12820 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12822 tree result = NULL_TREE;
12824 STRIP_NOPS (arg1);
12825 STRIP_NOPS (arg2);
12826 STRIP_NOPS (arg3);
12828 /* To proceed, MPFR must exactly represent the target floating point
12829 format, which only happens when the target base equals two. */
12830 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12831 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12832 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12833 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12835 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12836 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12837 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12839 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12841 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12842 int inexact;
12843 mpfr_t m1, m2, m3;
12845 mpfr_inits2 (prec, m1, m2, m3, NULL);
12846 mpfr_from_real (m1, ra1, GMP_RNDN);
12847 mpfr_from_real (m2, ra2, GMP_RNDN);
12848 mpfr_from_real (m3, ra3, GMP_RNDN);
12849 mpfr_clear_flags ();
12850 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12851 result = do_mpfr_ckconv (m1, type, inexact);
12852 mpfr_clears (m1, m2, m3, NULL);
12856 return result;

/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
          tree result_s, result_c;
          int inexact;
          mpfr_t m, ms, mc;

          mpfr_inits2 (prec, m, ms, mc, NULL);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
          result_s = do_mpfr_ckconv (ms, type, inexact);
          result_c = do_mpfr_ckconv (mc, type, inexact);
          mpfr_clears (m, ms, mc, NULL);
          if (result_s && result_c)
            {
              /* If we are to return the result as a complex value, do so.  */
              if (!arg_sinp && !arg_cosp)
                return build_complex (build_complex_type (type),
                                      result_c, result_s);

              /* Dereference the sin/cos pointer arguments.  */
              arg_sinp = build_fold_indirect_ref (arg_sinp);
              arg_cosp = build_fold_indirect_ref (arg_cosp);
              /* Proceed iff valid pointer types were passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
                  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
                {
                  /* Set the values.  */
                  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
                                          result_s);
                  TREE_SIDE_EFFECTS (result_s) = 1;
                  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
                                          result_c);
                  TREE_SIDE_EFFECTS (result_c) = 1;
                  /* Combine the assignments into a compound expr.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_s, result_c));
                }
            }
        }
    }

  return result;
}
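
/* Illustrative sketch, not part of the original source: do_mpfr_sincos
   serves two kinds of callers.  For a sincos()-style builtin the sin
   and cos pointer arguments are passed through, while a cexpi()-style
   fold passes NULL_TREE for both and receives a complex constant.
   Assuming arg0 holds the constant angle and arg_s/arg_c the output
   pointers:

       tree stores  = do_mpfr_sincos (arg0, arg_s, arg_c);
       tree complex = do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);

   The first form yields a COMPOUND_EXPR performing both assignments;
   the second yields the value cos(x) + i*sin(x).  */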

#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long)n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
        {
          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, GMP_RNDN);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
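
/* Illustrative sketch, not part of the original source: do_mpfr_bessel_n
   is intended for jn()/yn()-style builtins.  Assuming arg0 is the integer
   order and arg1 the constant argument, the folds would look roughly like

       tree jn_fold = do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn,
                                        NULL, false);
       tree yn_fold = do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
                                        &dconst0, false);

   For yn() the MIN/INCLUSIVE pair restricts folding to strictly positive
   arguments, since y_n is not defined for x <= 0.  */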

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the remainder as the result.  The
   type is taken from the type of ARG0 and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo = fold_build2 (MODIFY_EXPR,
                                                 TREE_TYPE (arg_quo), arg_quo,
                                                 build_int_cst (NULL, integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }

  return result;
}
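
/* Illustrative sketch, not part of the original source: a remquo()
   fold simply forwards the three call arguments, with arg2 being the
   int pointer that receives the quotient bits:

       tree folded = do_mpfr_remquo (arg0, arg1, arg2);
       if (folded)
         return folded;

   The result, when non-NULL, is a COMPOUND_EXPR that stores the
   quotient through arg2 and evaluates to the remainder.  */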

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (NULL, sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
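
/* Illustrative sketch, not part of the original source: an lgamma_r()
   fold passes the value argument, the signgam pointer and the result
   type straight through:

       tree folded = do_mpfr_lgamma_r (arg0, arg1, type);
       if (folded)
         return folded;

   A non-NULL result assigns -1 or 1 through arg1 and evaluates to
   lgamma(arg0) as a REAL_CST.  */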

#endif