/* Expand builtin functions.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "tree-gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "value-prof.h"

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees; make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];

/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance, the runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
85 #endif
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
129 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
130 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
131 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
133 static rtx expand_builtin_bzero (tree);
134 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_alloca (tree, rtx);
140 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
141 static rtx expand_builtin_frame_address (tree, tree);
142 static rtx expand_builtin_fputs (tree, rtx, bool);
143 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
145 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
146 static tree stabilize_va_list (tree, int);
147 static rtx expand_builtin_expect (tree, rtx);
148 static tree fold_builtin_constant_p (tree);
149 static tree fold_builtin_expect (tree);
150 static tree fold_builtin_classify_type (tree);
151 static tree fold_builtin_strlen (tree);
152 static tree fold_builtin_inf (tree, int);
153 static tree fold_builtin_nan (tree, tree, int);
154 static tree rewrite_call_expr (tree, int, tree, int, ...);
155 static bool validate_arg (tree, enum tree_code code);
156 static bool integer_valued_real_p (tree);
157 static tree fold_trunc_transparent_mathfn (tree, tree);
158 static bool readonly_data_expr (tree);
159 static rtx expand_builtin_fabs (tree, rtx, rtx);
160 static rtx expand_builtin_signbit (tree, rtx);
161 static tree fold_builtin_sqrt (tree, tree);
162 static tree fold_builtin_cbrt (tree, tree);
163 static tree fold_builtin_pow (tree, tree, tree, tree);
164 static tree fold_builtin_powi (tree, tree, tree, tree);
165 static tree fold_builtin_cos (tree, tree, tree);
166 static tree fold_builtin_cosh (tree, tree, tree);
167 static tree fold_builtin_tan (tree, tree);
168 static tree fold_builtin_trunc (tree, tree);
169 static tree fold_builtin_floor (tree, tree);
170 static tree fold_builtin_ceil (tree, tree);
171 static tree fold_builtin_round (tree, tree);
172 static tree fold_builtin_int_roundingfn (tree, tree);
173 static tree fold_builtin_bitop (tree, tree);
174 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
175 static tree fold_builtin_strchr (tree, tree, tree);
176 static tree fold_builtin_memchr (tree, tree, tree, tree);
177 static tree fold_builtin_memcmp (tree, tree, tree);
178 static tree fold_builtin_strcmp (tree, tree);
179 static tree fold_builtin_strncmp (tree, tree, tree);
180 static tree fold_builtin_signbit (tree, tree);
181 static tree fold_builtin_copysign (tree, tree, tree, tree);
182 static tree fold_builtin_isascii (tree);
183 static tree fold_builtin_toascii (tree);
184 static tree fold_builtin_isdigit (tree);
185 static tree fold_builtin_fabs (tree, tree);
186 static tree fold_builtin_abs (tree, tree);
187 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
188 enum tree_code);
189 static tree fold_builtin_n (tree, tree *, int, bool);
190 static tree fold_builtin_0 (tree, bool);
191 static tree fold_builtin_1 (tree, tree, bool);
192 static tree fold_builtin_2 (tree, tree, tree, bool);
193 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
194 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
195 static tree fold_builtin_varargs (tree, tree, bool);
197 static tree fold_builtin_strpbrk (tree, tree, tree);
198 static tree fold_builtin_strstr (tree, tree, tree);
199 static tree fold_builtin_strrchr (tree, tree, tree);
200 static tree fold_builtin_strcat (tree, tree);
201 static tree fold_builtin_strncat (tree, tree, tree);
202 static tree fold_builtin_strspn (tree, tree);
203 static tree fold_builtin_strcspn (tree, tree);
204 static tree fold_builtin_sprintf (tree, tree, tree, int);
206 static rtx expand_builtin_object_size (tree);
207 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
208 enum built_in_function);
209 static void maybe_emit_chk_warning (tree, enum built_in_function);
210 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
211 static tree fold_builtin_object_size (tree, tree);
212 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
213 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
214 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
215 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
216 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
217 enum built_in_function);
218 static bool init_target_chars (void);
220 static unsigned HOST_WIDE_INT target_newline;
221 static unsigned HOST_WIDE_INT target_percent;
222 static unsigned HOST_WIDE_INT target_c;
223 static unsigned HOST_WIDE_INT target_s;
224 static char target_percent_c[3];
225 static char target_percent_s[3];
226 static char target_percent_s_newline[4];
227 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
228 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
229 static tree do_mpfr_arg2 (tree, tree, tree,
230 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
231 static tree do_mpfr_arg3 (tree, tree, tree, tree,
232 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
233 static tree do_mpfr_sincos (tree, tree, tree);
234 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
235 static tree do_mpfr_bessel_n (tree, tree, tree,
236 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
237 const REAL_VALUE_TYPE *, bool);
238 static tree do_mpfr_remquo (tree, tree, tree);
239 static tree do_mpfr_lgamma_r (tree, tree, tree);
240 #endif
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}
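/* Illustrative sketch, not part of this file: the effect of the check above
   as seen from user code.  Only the spelling the user wrote matters, so the
   two calls below are handled differently even though they name the same
   builtin.  The function and its name below are hypothetical.  */
#if 0
extern void *memcpy (void *, const void *, __SIZE_TYPE__);

void *
copy_example (void *d, const void *s, __SIZE_TYPE__ n)
{
  __builtin_memcpy (d, s, n);   /* "__builtin_" spelling: considered for
				   inline expansion at any -O level.  */
  return memcpy (d, s, n);      /* plain spelling: subject to the normal
				   optimization-level heuristics.  */
}
#endif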
256 /* Return the alignment in bits of EXP, a pointer valued expression.
257 But don't return more than MAX_ALIGN no matter what.
258 The alignment returned is, by default, the alignment of the thing that
259 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
261 Otherwise, look at the expression to see if we can do better, i.e., if the
262 expression is actually pointing at an object whose alignment is tighter. */
265 get_pointer_alignment (tree exp, unsigned int max_align)
267 unsigned int align, inner;
269 /* We rely on TER to compute accurate alignment information. */
270 if (!(optimize && flag_tree_ter))
271 return 0;
273 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
274 return 0;
276 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
277 align = MIN (align, max_align);
279 while (1)
281 switch (TREE_CODE (exp))
283 case NOP_EXPR:
284 case CONVERT_EXPR:
285 case NON_LVALUE_EXPR:
286 exp = TREE_OPERAND (exp, 0);
287 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
288 return align;
290 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
291 align = MIN (inner, max_align);
292 break;
294 case PLUS_EXPR:
295 /* If sum of pointer + int, restrict our maximum alignment to that
296 imposed by the integer. If not, we can't do any better than
297 ALIGN. */
298 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
299 return align;
301 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
302 & (max_align / BITS_PER_UNIT - 1))
303 != 0)
304 max_align >>= 1;
306 exp = TREE_OPERAND (exp, 0);
307 break;
309 case ADDR_EXPR:
310 /* See what we are pointing at and look at its alignment. */
311 exp = TREE_OPERAND (exp, 0);
312 inner = max_align;
313 if (handled_component_p (exp))
315 HOST_WIDE_INT bitsize, bitpos;
316 tree offset;
317 enum machine_mode mode;
318 int unsignedp, volatilep;
320 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
321 &mode, &unsignedp, &volatilep, true);
322 if (bitpos)
323 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
324 if (offset && TREE_CODE (offset) == PLUS_EXPR
325 && host_integerp (TREE_OPERAND (offset, 1), 1))
327 /* Any overflow in calculating offset_bits won't change
328 the alignment. */
329 unsigned offset_bits
330 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
331 * BITS_PER_UNIT);
333 if (offset_bits)
334 inner = MIN (inner, (offset_bits & -offset_bits));
335 offset = TREE_OPERAND (offset, 0);
337 if (offset && TREE_CODE (offset) == MULT_EXPR
338 && host_integerp (TREE_OPERAND (offset, 1), 1))
340 /* Any overflow in calculating offset_factor won't change
341 the alignment. */
342 unsigned offset_factor
343 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
344 * BITS_PER_UNIT);
346 if (offset_factor)
347 inner = MIN (inner, (offset_factor & -offset_factor));
349 else if (offset)
350 inner = MIN (inner, BITS_PER_UNIT);
352 if (TREE_CODE (exp) == FUNCTION_DECL)
353 align = FUNCTION_BOUNDARY;
354 else if (DECL_P (exp))
355 align = MIN (inner, DECL_ALIGN (exp));
356 #ifdef CONSTANT_ALIGNMENT
357 else if (CONSTANT_CLASS_P (exp))
358 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
359 #endif
360 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
361 || TREE_CODE (exp) == INDIRECT_REF)
362 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
363 else
364 align = MIN (align, inner);
365 return MIN (align, max_align);
367 default:
368 return align;
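/* Illustrative sketch, not part of this file (the variable names are
   hypothetical): how a string/memory expander typically consumes this
   result, asking for the alignment of each pointer argument and giving up
   on the fast path when nothing useful is known.  */
#if 0
  unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
  unsigned int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* 0 means "no usable information"; otherwise the value is an alignment
     in bits, capped at BIGGEST_ALIGNMENT.  */
  if (src_align == 0 || dest_align == 0)
    return NULL_RTX;
#endif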
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning (0, "offset outside bounds of constant string");
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
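/* Illustrative sketch, not part of this file: what c_strlen can fold.  For
   a string constant plus constant offset the length is computed at compile
   time, so a source-level call such as the hypothetical one below needs no
   run-time strlen; a non-constant argument instead yields NULL_TREE.  */
#if 0
__SIZE_TYPE__
strlen_example (void)
{
  return __builtin_strlen ("hello" + 2);   /* folded to the constant 3 */
}
#endif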
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
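/* Illustrative sketch, not part of this file: for the 4-byte string "abcd"
   read in SImode, the bytes are packed in target order, so a little-endian
   target yields (const_int 0x64636261) while a big-endian target yields
   (const_int 0x61626364).  The memcpy/strcpy expanders use this to emit
   whole-word stores of a constant source string.  */
#if 0
  rtx word = c_readstr ("abcd", SImode);
#endif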
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (!host_integerp (cst, 1)
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = tree_low_cst (cst, 1);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_ADDRESSABLE (exp) == 0
      && (TREE_CODE (exp) == PARM_DECL
	  || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
    return exp;

  return save_expr (exp);
}
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
568 static rtx
569 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
571 int i;
573 #ifdef INITIAL_FRAME_ADDRESS_RTX
574 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
575 #else
576 rtx tem;
578 /* For a zero count with __builtin_return_address, we don't care what
579 frame address we return, because target-specific definitions will
580 override us. Therefore frame pointer elimination is OK, and using
581 the soft frame pointer is OK.
583 For a nonzero count, or a zero count with __builtin_frame_address,
584 we require a stable offset from the current frame pointer to the
585 previous one, so we must use the hard frame pointer, and
586 we must disable frame pointer elimination. */
587 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
588 tem = frame_pointer_rtx;
589 else
591 tem = hard_frame_pointer_rtx;
593 /* Tell reload not to eliminate the frame pointer. */
594 current_function_accesses_prior_frames = 1;
596 #endif
598 /* Some machines need special handling before we can access
599 arbitrary frames. For example, on the SPARC, we must first flush
600 all register windows to the stack. */
601 #ifdef SETUP_FRAME_ADDRESSES
602 if (count > 0)
603 SETUP_FRAME_ADDRESSES ();
604 #endif
606 /* On the SPARC, the return address is not in the frame, it is in a
607 register. There is no way to access it off of the current frame
608 pointer, but it can be accessed off the previous frame pointer by
609 reading the value from the register window save area. */
610 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
611 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
612 count--;
613 #endif
615 /* Scan back COUNT frames to the specified frame. */
616 for (i = 0; i < count; i++)
618 /* Assume the dynamic chain pointer is in the word that the
619 frame address points to, unless otherwise specified. */
620 #ifdef DYNAMIC_CHAIN_ADDRESS
621 tem = DYNAMIC_CHAIN_ADDRESS (tem);
622 #endif
623 tem = memory_address (Pmode, tem);
624 tem = gen_frame_mem (Pmode, tem);
625 tem = copy_to_reg (tem);
628 /* For __builtin_frame_address, return what we've got. But, on
629 the SPARC for example, we may have to add a bias. */
630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
631 #ifdef FRAME_ADDR_RTX
632 return FRAME_ADDR_RTX (tem);
633 #else
634 return tem;
635 #endif
637 /* For __builtin_return_address, get the return address from that frame. */
638 #ifdef RETURN_ADDR_RTX
639 tem = RETURN_ADDR_RTX (count, tem);
640 #else
641 tem = memory_address (Pmode,
642 plus_constant (tem, GET_MODE_SIZE (Pmode)));
643 tem = gen_frame_mem (Pmode, tem);
644 #endif
645 return tem;
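/* Illustrative sketch, not part of this file: the source-level builtins
   that reach this expander.  A nonzero count walks that many dynamic frames
   back, which is only reliable when every intervening frame keeps a frame
   pointer.  The function name below is hypothetical.  */
#if 0
void
frame_example (void)
{
  void *ret_pc = __builtin_return_address (0);  /* return address of this frame */
  void *this_fp = __builtin_frame_address (0);  /* frame address of this frame */
  void *up_fp = __builtin_frame_address (1);    /* one dynamic frame up; may
						   need -fno-omit-frame-pointer */
  (void) ret_pc; (void) this_fp; (void) up_fp;
}
#endif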
648 /* Alias set used for setjmp buffer. */
649 static HOST_WIDE_INT setjmp_alias_set = -1;
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
655 void
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
659 rtx stack_save;
660 rtx mem;
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
665 buf_addr = convert_memory_address (Pmode, buf_addr);
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
678 set_mem_alias_set (mem, setjmp_alias_set);
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
693 #endif
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 current_function_calls_setjmp = 1;
699 /* We have a nonlocal label. */
700 current_function_has_nonlocal_label = 1;
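/* Illustrative sketch, not part of this file: the buffer layout written by
   the code above for a __builtin_setjmp buffer BUF of Pmode words:

     BUF[0]        frame value (targetm.builtin_setjmp_frame_value)
     BUF[1]        address of RECEIVER_LABEL
     BUF[2] ...    stack save area, in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below read
   and rewrite these same slots.  */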
703 /* Construct the trailing part of a __builtin_setjmp call. This is
704 also called directly by the SJLJ exception handling code. */
706 void
707 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
709 /* Clobber the FP when we get here, so we have to make sure it's
710 marked as used by this function. */
711 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
713 /* Mark the static chain as clobbered here so life information
714 doesn't get messed up for it. */
715 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
717 /* Now put in the code to restore the frame pointer, and argument
718 pointer, if needed. */
719 #ifdef HAVE_nonlocal_goto
720 if (! HAVE_nonlocal_goto)
721 #endif
723 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
724 /* This might change the hard frame pointer in ways that aren't
725 apparent to early optimization passes, so force a clobber. */
726 emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
729 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
730 if (fixed_regs[ARG_POINTER_REGNUM])
732 #ifdef ELIMINABLE_REGS
733 size_t i;
734 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
736 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
737 if (elim_regs[i].from == ARG_POINTER_REGNUM
738 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
739 break;
741 if (i == ARRAY_SIZE (elim_regs))
742 #endif
744 /* Now restore our arg pointer from the address at which it
745 was saved in our stack frame. */
746 emit_move_insn (virtual_incoming_args_rtx,
747 copy_to_reg (get_arg_pointer_save_area (cfun)));
750 #endif
752 #ifdef HAVE_builtin_setjmp_receiver
753 if (HAVE_builtin_setjmp_receiver)
754 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
755 else
756 #endif
757 #ifdef HAVE_nonlocal_goto_receiver
758 if (HAVE_nonlocal_goto_receiver)
759 emit_insn (gen_nonlocal_goto_receiver ());
760 else
761 #endif
762 { /* Nothing */ }
764 /* @@@ This is a kludge. Not all machine descriptions define a blockage
765 insn, but we must not allow the code we just generated to be reordered
766 by scheduling. Specifically, the update of the frame pointer must
767 happen immediately, not later. So emit an ASM_INPUT to act as blockage
768 insn. */
769 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
772 /* __builtin_longjmp is passed a pointer to an array of five words (not
773 all will be used on all machines). It operates similarly to the C
774 library function of the same name, but is more efficient. Much of
775 the code below is copied from the handling of non-local gotos. */
777 static void
778 expand_builtin_longjmp (rtx buf_addr, rtx value)
780 rtx fp, lab, stack, insn, last;
781 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
783 if (setjmp_alias_set == -1)
784 setjmp_alias_set = new_alias_set ();
786 buf_addr = convert_memory_address (Pmode, buf_addr);
788 buf_addr = force_reg (Pmode, buf_addr);
790 /* We used to store value in static_chain_rtx, but that fails if pointers
791 are smaller than integers. We instead require that the user must pass
792 a second argument of 1, because that is what builtin_setjmp will
793 return. This also makes EH slightly more efficient, since we are no
794 longer copying around a value that we don't care about. */
795 gcc_assert (value == const1_rtx);
797 last = get_last_insn ();
798 #ifdef HAVE_builtin_longjmp
799 if (HAVE_builtin_longjmp)
800 emit_insn (gen_builtin_longjmp (buf_addr));
801 else
802 #endif
804 fp = gen_rtx_MEM (Pmode, buf_addr);
805 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
806 GET_MODE_SIZE (Pmode)));
808 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
809 2 * GET_MODE_SIZE (Pmode)));
810 set_mem_alias_set (fp, setjmp_alias_set);
811 set_mem_alias_set (lab, setjmp_alias_set);
812 set_mem_alias_set (stack, setjmp_alias_set);
814 /* Pick up FP, label, and SP from the block and jump. This code is
815 from expand_goto in stmt.c; see there for detailed comments. */
816 #ifdef HAVE_nonlocal_goto
817 if (HAVE_nonlocal_goto)
818 /* We have to pass a value to the nonlocal_goto pattern that will
819 get copied into the static_chain pointer, but it does not matter
820 what that value is, because builtin_setjmp does not use it. */
821 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
822 else
823 #endif
825 lab = copy_to_reg (lab);
827 emit_insn (gen_rtx_CLOBBER (VOIDmode,
828 gen_rtx_MEM (BLKmode,
829 gen_rtx_SCRATCH (VOIDmode))));
830 emit_insn (gen_rtx_CLOBBER (VOIDmode,
831 gen_rtx_MEM (BLKmode,
832 hard_frame_pointer_rtx)));
834 emit_move_insn (hard_frame_pointer_rtx, fp);
835 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
837 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
838 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
839 emit_indirect_jump (lab);
843 /* Search backwards and mark the jump insn as a non-local goto.
844 Note that this precludes the use of __builtin_longjmp to a
845 __builtin_setjmp target in the same function. However, we've
846 already cautioned the user that these functions are for
847 internal exception handling use only. */
848 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
850 gcc_assert (insn != last);
852 if (JUMP_P (insn))
854 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
855 REG_NOTES (insn));
856 break;
858 else if (CALL_P (insn))
859 break;
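/* Illustrative sketch, not part of this file: the intended pairing of these
   internal builtins.  The setjmp and the longjmp must be in different
   functions (see the comment above), and the second argument to
   __builtin_longjmp must be the constant 1, which is also what
   __builtin_setjmp returns when control comes back this way.  The names
   below are hypothetical.  */
#if 0
static void *jbuf[5];

static void
bounce (void)
{
  __builtin_longjmp (jbuf, 1);
}

static int
setjmp_example (void)
{
  if (__builtin_setjmp (jbuf) == 0)
    {
      bounce ();   /* does not return normally */
      return 0;
    }
  return 1;        /* reached via the longjmp */
}
#endif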
863 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
864 and the address of the save area. */
866 static rtx
867 expand_builtin_nonlocal_goto (tree exp)
869 tree t_label, t_save_area;
870 rtx r_label, r_save_area, r_fp, r_sp, insn;
872 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
873 return NULL_RTX;
875 t_label = CALL_EXPR_ARG (exp, 0);
876 t_save_area = CALL_EXPR_ARG (exp, 1);
878 r_label = expand_normal (t_label);
879 r_label = convert_memory_address (Pmode, r_label);
880 r_save_area = expand_normal (t_save_area);
881 r_save_area = convert_memory_address (Pmode, r_save_area);
882 r_fp = gen_rtx_MEM (Pmode, r_save_area);
883 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
884 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
886 current_function_has_nonlocal_goto = 1;
888 #ifdef HAVE_nonlocal_goto
889 /* ??? We no longer need to pass the static chain value, afaik. */
890 if (HAVE_nonlocal_goto)
891 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
892 else
893 #endif
895 r_label = copy_to_reg (r_label);
897 emit_insn (gen_rtx_CLOBBER (VOIDmode,
898 gen_rtx_MEM (BLKmode,
899 gen_rtx_SCRATCH (VOIDmode))));
901 emit_insn (gen_rtx_CLOBBER (VOIDmode,
902 gen_rtx_MEM (BLKmode,
903 hard_frame_pointer_rtx)));
905 /* Restore frame pointer for containing function.
906 This sets the actual hard register used for the frame pointer
907 to the location of the function's incoming static chain info.
908 The non-local goto handler will then adjust it to contain the
909 proper value and reload the argument pointer, if needed. */
910 emit_move_insn (hard_frame_pointer_rtx, r_fp);
911 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
913 /* USE of hard_frame_pointer_rtx added for consistency;
914 not clear if really needed. */
915 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
916 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
917 emit_indirect_jump (r_label);
920 /* Search backwards to the jump insn and mark it as a
921 non-local goto. */
922 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
924 if (JUMP_P (insn))
926 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
927 const0_rtx, REG_NOTES (insn));
928 break;
930 else if (CALL_P (insn))
931 break;
934 return const0_rtx;
937 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
938 (not all will be used on all machines) that was passed to __builtin_setjmp.
939 It updates the stack pointer in that block to correspond to the current
940 stack pointer. */
942 static void
943 expand_builtin_update_setjmp_buf (rtx buf_addr)
945 enum machine_mode sa_mode = Pmode;
946 rtx stack_save;
949 #ifdef HAVE_save_stack_nonlocal
950 if (HAVE_save_stack_nonlocal)
951 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
952 #endif
953 #ifdef STACK_SAVEAREA_MODE
954 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
955 #endif
957 stack_save
958 = gen_rtx_MEM (sa_mode,
959 memory_address
960 (sa_mode,
961 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
963 #ifdef HAVE_setjmp
964 if (HAVE_setjmp)
965 emit_insn (gen_setjmp ());
966 #endif
968 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
971 /* Expand a call to __builtin_prefetch. For a target that does not support
972 data prefetch, evaluate the memory address argument in case it has side
973 effects. */
975 static void
976 expand_builtin_prefetch (tree exp)
978 tree arg0, arg1, arg2;
979 int nargs;
980 rtx op0, op1, op2;
982 if (!validate_arglist (exp, POINTER_TYPE, 0))
983 return;
985 arg0 = CALL_EXPR_ARG (exp, 0);
987 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
988 zero (read) and argument 2 (locality) defaults to 3 (high degree of
989 locality). */
990 nargs = call_expr_nargs (exp);
991 if (nargs > 1)
992 arg1 = CALL_EXPR_ARG (exp, 1);
993 else
994 arg1 = integer_zero_node;
995 if (nargs > 2)
996 arg2 = CALL_EXPR_ARG (exp, 2);
997 else
998 arg2 = build_int_cst (NULL_TREE, 3);
1000 /* Argument 0 is an address. */
1001 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1003 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1004 if (TREE_CODE (arg1) != INTEGER_CST)
1006 error ("second argument to %<__builtin_prefetch%> must be a constant");
1007 arg1 = integer_zero_node;
1009 op1 = expand_normal (arg1);
1010 /* Argument 1 must be either zero or one. */
1011 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1013 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1014 " using zero");
1015 op1 = const0_rtx;
1018 /* Argument 2 (locality) must be a compile-time constant int. */
1019 if (TREE_CODE (arg2) != INTEGER_CST)
1021 error ("third argument to %<__builtin_prefetch%> must be a constant");
1022 arg2 = integer_zero_node;
1024 op2 = expand_normal (arg2);
1025 /* Argument 2 must be 0, 1, 2, or 3. */
1026 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1028 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1029 op2 = const0_rtx;
1032 #ifdef HAVE_prefetch
1033 if (HAVE_prefetch)
1035 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1036 (op0,
1037 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1038 || (GET_MODE (op0) != Pmode))
1040 op0 = convert_memory_address (Pmode, op0);
1041 op0 = force_reg (Pmode, op0);
1043 emit_insn (gen_prefetch (op0, op1, op2));
1045 #endif
1047 /* Don't do anything with direct references to volatile memory, but
1048 generate code to handle other side effects. */
1049 if (!MEM_P (op0) && side_effects_p (op0))
1050 emit_insn (op0);
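/* Illustrative sketch, not part of this file: the source-level form that
   reaches this expander.  The read/write and locality arguments must be
   integer constants; invalid values are diagnosed above and replaced by
   zero.  The function name below is hypothetical.  */
#if 0
double
prefetch_example (const double *a, long n)
{
  double sum = 0.0;
  long i;
  for (i = 0; i < n; i++)
    {
      __builtin_prefetch (&a[i + 16], 0, 3);  /* rw = 0 (read),
						 locality = 3 (keep cached) */
      sum += a[i];
    }
  return sum;
}
#endif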
1053 /* Get a MEM rtx for expression EXP which is the address of an operand
1054 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1055 the maximum length of the block of memory that might be accessed or
1056 NULL if unknown. */
1058 static rtx
1059 get_memory_rtx (tree exp, tree len)
1061 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1062 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1064 /* Get an expression we can use to find the attributes to assign to MEM.
1065 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1066 we can. First remove any nops. */
1067 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1068 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1069 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1070 exp = TREE_OPERAND (exp, 0);
1072 if (TREE_CODE (exp) == ADDR_EXPR)
1073 exp = TREE_OPERAND (exp, 0);
1074 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1075 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1076 else
1077 exp = NULL;
1079 /* Honor attributes derived from exp, except for the alias set
1080 (as builtin stringops may alias with anything) and the size
1081 (as stringops may access multiple array elements). */
1082 if (exp)
1084 set_mem_attributes (mem, exp, 0);
1086 /* Allow the string and memory builtins to overflow from one
1087 field into another, see http://gcc.gnu.org/PR23561.
1088 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1089 memory accessed by the string or memory builtin will fit
1090 within the field. */
1091 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1093 tree mem_expr = MEM_EXPR (mem);
1094 HOST_WIDE_INT offset = -1, length = -1;
1095 tree inner = exp;
1097 while (TREE_CODE (inner) == ARRAY_REF
1098 || TREE_CODE (inner) == NOP_EXPR
1099 || TREE_CODE (inner) == CONVERT_EXPR
1100 || TREE_CODE (inner) == NON_LVALUE_EXPR
1101 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1102 || TREE_CODE (inner) == SAVE_EXPR)
1103 inner = TREE_OPERAND (inner, 0);
1105 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1107 if (MEM_OFFSET (mem)
1108 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1109 offset = INTVAL (MEM_OFFSET (mem));
1111 if (offset >= 0 && len && host_integerp (len, 0))
1112 length = tree_low_cst (len, 0);
1114 while (TREE_CODE (inner) == COMPONENT_REF)
1116 tree field = TREE_OPERAND (inner, 1);
1117 gcc_assert (! DECL_BIT_FIELD (field));
1118 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1119 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1121 if (length >= 0
1122 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1123 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1125 HOST_WIDE_INT size
1126 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1127 /* If we can prove the memory starting at XEXP (mem, 0)
1128 and ending at XEXP (mem, 0) + LENGTH will fit into
1129 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1130 if (offset <= size
1131 && length <= size
1132 && offset + length <= size)
1133 break;
1136 if (offset >= 0
1137 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1138 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1139 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1140 / BITS_PER_UNIT;
1141 else
1143 offset = -1;
1144 length = -1;
1147 mem_expr = TREE_OPERAND (mem_expr, 0);
1148 inner = TREE_OPERAND (inner, 0);
1151 if (mem_expr == NULL)
1152 offset = -1;
1153 if (mem_expr != MEM_EXPR (mem))
1155 set_mem_expr (mem, mem_expr);
1156 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1159 set_mem_alias_set (mem, 0);
1160 set_mem_size (mem, NULL_RTX);
1163 return mem;
1166 /* Built-in functions to perform an untyped call and return. */
1168 /* For each register that may be used for calling a function, this
1169 gives a mode used to copy the register's value. VOIDmode indicates
1170 the register is not used for calling a function. If the machine
1171 has register windows, this gives only the outbound registers.
1172 INCOMING_REGNO gives the corresponding inbound register. */
1173 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1175 /* For each register that may be used for returning values, this gives
1176 a mode used to copy the register's value. VOIDmode indicates the
1177 register is not used for returning values. If the machine has
1178 register windows, this gives only the outbound registers.
1179 INCOMING_REGNO gives the corresponding inbound register. */
1180 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1182 /* For each register that may be used for calling a function, this
1183 gives the offset of that register into the block returned by
1184 __builtin_apply_args. 0 indicates that the register is not
1185 used for calling a function. */
1186 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1188 /* Return the size required for the block returned by __builtin_apply_args,
1189 and initialize apply_args_mode. */
1191 static int
1192 apply_args_size (void)
1194 static int size = -1;
1195 int align;
1196 unsigned int regno;
1197 enum machine_mode mode;
1199 /* The values computed by this function never change. */
1200 if (size < 0)
1202 /* The first value is the incoming arg-pointer. */
1203 size = GET_MODE_SIZE (Pmode);
1205 /* The second value is the structure value address unless this is
1206 passed as an "invisible" first argument. */
1207 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1208 size += GET_MODE_SIZE (Pmode);
1210 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1211 if (FUNCTION_ARG_REGNO_P (regno))
1213 mode = reg_raw_mode[regno];
1215 gcc_assert (mode != VOIDmode);
1217 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1218 if (size % align != 0)
1219 size = CEIL (size, align) * align;
1220 apply_args_reg_offset[regno] = size;
1221 size += GET_MODE_SIZE (mode);
1222 apply_args_mode[regno] = mode;
1224 else
1226 apply_args_mode[regno] = VOIDmode;
1227 apply_args_reg_offset[regno] = 0;
1230 return size;
1233 /* Return the size required for the block returned by __builtin_apply,
1234 and initialize apply_result_mode. */
1236 static int
1237 apply_result_size (void)
1239 static int size = -1;
1240 int align, regno;
1241 enum machine_mode mode;
1243 /* The values computed by this function never change. */
1244 if (size < 0)
1246 size = 0;
1248 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1249 if (FUNCTION_VALUE_REGNO_P (regno))
1251 mode = reg_raw_mode[regno];
1253 gcc_assert (mode != VOIDmode);
1255 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1256 if (size % align != 0)
1257 size = CEIL (size, align) * align;
1258 size += GET_MODE_SIZE (mode);
1259 apply_result_mode[regno] = mode;
1261 else
1262 apply_result_mode[regno] = VOIDmode;
1264 /* Allow targets that use untyped_call and untyped_return to override
1265 the size so that machine-specific information can be stored here. */
1266 #ifdef APPLY_RESULT_SIZE
1267 size = APPLY_RESULT_SIZE;
1268 #endif
1270 return size;
1273 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1274 /* Create a vector describing the result block RESULT. If SAVEP is true,
1275 the result block is used to save the values; otherwise it is used to
1276 restore the values. */
1278 static rtx
1279 result_vector (int savep, rtx result)
1281 int regno, size, align, nelts;
1282 enum machine_mode mode;
1283 rtx reg, mem;
1284 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1286 size = nelts = 0;
1287 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1288 if ((mode = apply_result_mode[regno]) != VOIDmode)
1290 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1291 if (size % align != 0)
1292 size = CEIL (size, align) * align;
1293 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1294 mem = adjust_address (result, mode, size);
1295 savevec[nelts++] = (savep
1296 ? gen_rtx_SET (VOIDmode, mem, reg)
1297 : gen_rtx_SET (VOIDmode, reg, mem));
1298 size += GET_MODE_SIZE (mode);
1300 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1302 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1304 /* Save the state required to perform an untyped call with the same
1305 arguments as were passed to the current function. */
1307 static rtx
1308 expand_builtin_apply_args_1 (void)
1310 rtx registers, tem;
1311 int size, align, regno;
1312 enum machine_mode mode;
1313 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1315 /* Create a block where the arg-pointer, structure value address,
1316 and argument registers can be saved. */
1317 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1319 /* Walk past the arg-pointer and structure value address. */
1320 size = GET_MODE_SIZE (Pmode);
1321 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1322 size += GET_MODE_SIZE (Pmode);
1324 /* Save each register used in calling a function to the block. */
1325 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1326 if ((mode = apply_args_mode[regno]) != VOIDmode)
1328 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1329 if (size % align != 0)
1330 size = CEIL (size, align) * align;
1332 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1334 emit_move_insn (adjust_address (registers, mode, size), tem);
1335 size += GET_MODE_SIZE (mode);
1338 /* Save the arg pointer to the block. */
1339 tem = copy_to_reg (virtual_incoming_args_rtx);
1340 #ifdef STACK_GROWS_DOWNWARD
1341 /* We need the pointer as the caller actually passed them to us, not
1342 as we might have pretended they were passed. Make sure it's a valid
1343 operand, as emit_move_insn isn't expected to handle a PLUS. */
1345 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1346 NULL_RTX);
1347 #endif
1348 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1350 size = GET_MODE_SIZE (Pmode);
1352 /* Save the structure value address unless this is passed as an
1353 "invisible" first argument. */
1354 if (struct_incoming_value)
1356 emit_move_insn (adjust_address (registers, Pmode, size),
1357 copy_to_reg (struct_incoming_value));
1358 size += GET_MODE_SIZE (Pmode);
1361 /* Return the address of the block. */
1362 return copy_addr_to_reg (XEXP (registers, 0));
1365 /* __builtin_apply_args returns block of memory allocated on
1366 the stack into which is stored the arg pointer, structure
1367 value address, static chain, and all the registers that might
1368 possibly be used in performing a function call. The code is
1369 moved to the start of the function so the incoming values are
1370 saved. */
1372 static rtx
1373 expand_builtin_apply_args (void)
1375 /* Don't do __builtin_apply_args more than once in a function.
1376 Save the result of the first call and reuse it. */
1377 if (apply_args_value != 0)
1378 return apply_args_value;
1380 /* When this function is called, it means that registers must be
1381 saved on entry to this function. So we migrate the
1382 call to the first insn of this function. */
1383 rtx temp;
1384 rtx seq;
1386 start_sequence ();
1387 temp = expand_builtin_apply_args_1 ();
1388 seq = get_insns ();
1389 end_sequence ();
1391 apply_args_value = temp;
1393 /* Put the insns after the NOTE that starts the function.
1394 If this is inside a start_sequence, make the outer-level insn
1395 chain current, so the code is placed at the start of the
1396 function. */
1397 push_topmost_sequence ();
1398 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1399 pop_topmost_sequence ();
1400 return temp;
1404 /* Perform an untyped call and save the state required to perform an
1405 untyped return of whatever value was returned by the given function. */
1407 static rtx
1408 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1410 int size, align, regno;
1411 enum machine_mode mode;
1412 rtx incoming_args, result, reg, dest, src, call_insn;
1413 rtx old_stack_level = 0;
1414 rtx call_fusage = 0;
1415 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1417 arguments = convert_memory_address (Pmode, arguments);
1419 /* Create a block where the return registers can be saved. */
1420 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1422 /* Fetch the arg pointer from the ARGUMENTS block. */
1423 incoming_args = gen_reg_rtx (Pmode);
1424 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1425 #ifndef STACK_GROWS_DOWNWARD
1426 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1427 incoming_args, 0, OPTAB_LIB_WIDEN);
1428 #endif
1430 /* Push a new argument block and copy the arguments. Do not allow
1431 the (potential) memcpy call below to interfere with our stack
1432 manipulations. */
1433 do_pending_stack_adjust ();
1434 NO_DEFER_POP;
1436 /* Save the stack with nonlocal if available. */
1437 #ifdef HAVE_save_stack_nonlocal
1438 if (HAVE_save_stack_nonlocal)
1439 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1440 else
1441 #endif
1442 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1444 /* Allocate a block of memory onto the stack and copy the memory
1445 arguments to the outgoing arguments address. */
1446 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1447 dest = virtual_outgoing_args_rtx;
1448 #ifndef STACK_GROWS_DOWNWARD
1449 if (GET_CODE (argsize) == CONST_INT)
1450 dest = plus_constant (dest, -INTVAL (argsize));
1451 else
1452 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1453 #endif
1454 dest = gen_rtx_MEM (BLKmode, dest);
1455 set_mem_align (dest, PARM_BOUNDARY);
1456 src = gen_rtx_MEM (BLKmode, incoming_args);
1457 set_mem_align (src, PARM_BOUNDARY);
1458 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1460 /* Refer to the argument block. */
1461 apply_args_size ();
1462 arguments = gen_rtx_MEM (BLKmode, arguments);
1463 set_mem_align (arguments, PARM_BOUNDARY);
1465 /* Walk past the arg-pointer and structure value address. */
1466 size = GET_MODE_SIZE (Pmode);
1467 if (struct_value)
1468 size += GET_MODE_SIZE (Pmode);
1470 /* Restore each of the registers previously saved. Make USE insns
1471 for each of these registers for use in making the call. */
1472 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1473 if ((mode = apply_args_mode[regno]) != VOIDmode)
1475 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1476 if (size % align != 0)
1477 size = CEIL (size, align) * align;
1478 reg = gen_rtx_REG (mode, regno);
1479 emit_move_insn (reg, adjust_address (arguments, mode, size));
1480 use_reg (&call_fusage, reg);
1481 size += GET_MODE_SIZE (mode);
1484 /* Restore the structure value address unless this is passed as an
1485 "invisible" first argument. */
1486 size = GET_MODE_SIZE (Pmode);
1487 if (struct_value)
1489 rtx value = gen_reg_rtx (Pmode);
1490 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1491 emit_move_insn (struct_value, value);
1492 if (REG_P (struct_value))
1493 use_reg (&call_fusage, struct_value);
1494 size += GET_MODE_SIZE (Pmode);
1497 /* All arguments and registers used for the call are set up by now! */
1498 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1500 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1501 and we don't want to load it into a register as an optimization,
1502 because prepare_call_address already did it if it should be done. */
1503 if (GET_CODE (function) != SYMBOL_REF)
1504 function = memory_address (FUNCTION_MODE, function);
1506 /* Generate the actual call instruction and save the return value. */
1507 #ifdef HAVE_untyped_call
1508 if (HAVE_untyped_call)
1509 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1510 result, result_vector (1, result)));
1511 else
1512 #endif
1513 #ifdef HAVE_call_value
1514 if (HAVE_call_value)
1516 rtx valreg = 0;
1518 /* Locate the unique return register. It is not possible to
1519 express a call that sets more than one return register using
1520 call_value; use untyped_call for that. In fact, untyped_call
1521 only needs to save the return registers in the given block. */
1522 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1523 if ((mode = apply_result_mode[regno]) != VOIDmode)
1525 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1527 valreg = gen_rtx_REG (mode, regno);
1530 emit_call_insn (GEN_CALL_VALUE (valreg,
1531 gen_rtx_MEM (FUNCTION_MODE, function),
1532 const0_rtx, NULL_RTX, const0_rtx));
1534 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1536 else
1537 #endif
1538 gcc_unreachable ();
1540 /* Find the CALL insn we just emitted, and attach the register usage
1541 information. */
1542 call_insn = last_call_insn ();
1543 add_function_usage_to (call_insn, call_fusage);
1545 /* Restore the stack. */
1546 #ifdef HAVE_save_stack_nonlocal
1547 if (HAVE_save_stack_nonlocal)
1548 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1549 else
1550 #endif
1551 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1553 OK_DEFER_POP;
1555 /* Return the address of the result block. */
1556 result = copy_addr_to_reg (XEXP (result, 0));
1557 return convert_memory_address (ptr_mode, result);
1560 /* Perform an untyped return. */
1562 static void
1563 expand_builtin_return (rtx result)
1565 int size, align, regno;
1566 enum machine_mode mode;
1567 rtx reg;
1568 rtx call_fusage = 0;
1570 result = convert_memory_address (Pmode, result);
1572 apply_result_size ();
1573 result = gen_rtx_MEM (BLKmode, result);
1575 #ifdef HAVE_untyped_return
1576 if (HAVE_untyped_return)
1578 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1579 emit_barrier ();
1580 return;
1582 #endif
1584 /* Restore the return value and note that each value is used. */
1585 size = 0;
1586 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1587 if ((mode = apply_result_mode[regno]) != VOIDmode)
1589 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1590 if (size % align != 0)
1591 size = CEIL (size, align) * align;
1592 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1593 emit_move_insn (reg, adjust_address (result, mode, size));
1595 push_to_sequence (call_fusage);
1596 emit_insn (gen_rtx_USE (VOIDmode, reg));
1597 call_fusage = get_insns ();
1598 end_sequence ();
1599 size += GET_MODE_SIZE (mode);
1602 /* Put the USE insns before the return. */
1603 emit_insn (call_fusage);
1605 /* Return whatever values was restored by jumping directly to the end
1606 of the function. */
1607 expand_naked_return ();
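/* Illustrative sketch, not part of this file: __builtin_apply_args,
   __builtin_apply and __builtin_return are meant to be used together to
   build a transparent forwarding wrapper.  The wrapper and the 64-byte
   argument-size bound below are hypothetical.  */
#if 0
double target_fn (double x, int y);

double
wrapper (double x, int y)
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) target_fn, args,
				  64 /* upper bound on pushed argument bytes */);
  __builtin_return (result);
}
#endif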
1610 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1612 static enum type_class
1613 type_to_class (tree type)
1615 switch (TREE_CODE (type))
1617 case VOID_TYPE: return void_type_class;
1618 case INTEGER_TYPE: return integer_type_class;
1619 case ENUMERAL_TYPE: return enumeral_type_class;
1620 case BOOLEAN_TYPE: return boolean_type_class;
1621 case POINTER_TYPE: return pointer_type_class;
1622 case REFERENCE_TYPE: return reference_type_class;
1623 case OFFSET_TYPE: return offset_type_class;
1624 case REAL_TYPE: return real_type_class;
1625 case COMPLEX_TYPE: return complex_type_class;
1626 case FUNCTION_TYPE: return function_type_class;
1627 case METHOD_TYPE: return method_type_class;
1628 case RECORD_TYPE: return record_type_class;
1629 case UNION_TYPE:
1630 case QUAL_UNION_TYPE: return union_type_class;
1631 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1632 ? string_type_class : array_type_class);
1633 case LANG_TYPE: return lang_type_class;
1634 default: return no_type_class;
1638 /* Expand a call EXP to __builtin_classify_type. */
1640 static rtx
1641 expand_builtin_classify_type (tree exp)
1643 if (call_expr_nargs (exp))
1644 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1645 return GEN_INT (no_type_class);
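/* Illustrative sketch, not part of this file: what the classification looks
   like from user code.  Each call folds to the enum type_class value that
   matches the argument's type.  The function name is hypothetical.  */
#if 0
void
classify_example (void)
{
  int ic = __builtin_classify_type (42);          /* integer_type_class */
  int rc = __builtin_classify_type (3.14);        /* real_type_class    */
  int pc = __builtin_classify_type ((char *) 0);  /* pointer_type_class */
  (void) ic; (void) rc; (void) pc;
}
#endif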
1648 /* This helper macro, meant to be used in mathfn_built_in below,
1649 determines which among a set of three builtin math functions is
1650 appropriate for a given type mode. The `F' and `L' cases are
1651 automatically generated from the `double' case. */
1652 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1653 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1654 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1655 fcodel = BUILT_IN_MATHFN##L ; break;
1656 /* Similar to above, but appends _R after any F/L suffix. */
1657 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1658 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1659 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1660 fcodel = BUILT_IN_MATHFN##L_R ; break;
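/* For illustration, CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so a single table entry below covers the double, float and long
   double variants of each function.  */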
1662 /* Return the mathematical function equivalent to FN but operating directly
1663 on TYPE, if available. If we can't do the conversion, return zero. */
1664 tree
1665 mathfn_built_in (tree type, enum built_in_function fn)
1667 enum built_in_function fcode, fcodef, fcodel;
1669 switch (fn)
1671 CASE_MATHFN (BUILT_IN_ACOS)
1672 CASE_MATHFN (BUILT_IN_ACOSH)
1673 CASE_MATHFN (BUILT_IN_ASIN)
1674 CASE_MATHFN (BUILT_IN_ASINH)
1675 CASE_MATHFN (BUILT_IN_ATAN)
1676 CASE_MATHFN (BUILT_IN_ATAN2)
1677 CASE_MATHFN (BUILT_IN_ATANH)
1678 CASE_MATHFN (BUILT_IN_CBRT)
1679 CASE_MATHFN (BUILT_IN_CEIL)
1680 CASE_MATHFN (BUILT_IN_CEXPI)
1681 CASE_MATHFN (BUILT_IN_COPYSIGN)
1682 CASE_MATHFN (BUILT_IN_COS)
1683 CASE_MATHFN (BUILT_IN_COSH)
1684 CASE_MATHFN (BUILT_IN_DREM)
1685 CASE_MATHFN (BUILT_IN_ERF)
1686 CASE_MATHFN (BUILT_IN_ERFC)
1687 CASE_MATHFN (BUILT_IN_EXP)
1688 CASE_MATHFN (BUILT_IN_EXP10)
1689 CASE_MATHFN (BUILT_IN_EXP2)
1690 CASE_MATHFN (BUILT_IN_EXPM1)
1691 CASE_MATHFN (BUILT_IN_FABS)
1692 CASE_MATHFN (BUILT_IN_FDIM)
1693 CASE_MATHFN (BUILT_IN_FLOOR)
1694 CASE_MATHFN (BUILT_IN_FMA)
1695 CASE_MATHFN (BUILT_IN_FMAX)
1696 CASE_MATHFN (BUILT_IN_FMIN)
1697 CASE_MATHFN (BUILT_IN_FMOD)
1698 CASE_MATHFN (BUILT_IN_FREXP)
1699 CASE_MATHFN (BUILT_IN_GAMMA)
1700 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1701 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1702 CASE_MATHFN (BUILT_IN_HYPOT)
1703 CASE_MATHFN (BUILT_IN_ILOGB)
1704 CASE_MATHFN (BUILT_IN_INF)
1705 CASE_MATHFN (BUILT_IN_ISINF)
1706 CASE_MATHFN (BUILT_IN_J0)
1707 CASE_MATHFN (BUILT_IN_J1)
1708 CASE_MATHFN (BUILT_IN_JN)
1709 CASE_MATHFN (BUILT_IN_LCEIL)
1710 CASE_MATHFN (BUILT_IN_LDEXP)
1711 CASE_MATHFN (BUILT_IN_LFLOOR)
1712 CASE_MATHFN (BUILT_IN_LGAMMA)
1713 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1714 CASE_MATHFN (BUILT_IN_LLCEIL)
1715 CASE_MATHFN (BUILT_IN_LLFLOOR)
1716 CASE_MATHFN (BUILT_IN_LLRINT)
1717 CASE_MATHFN (BUILT_IN_LLROUND)
1718 CASE_MATHFN (BUILT_IN_LOG)
1719 CASE_MATHFN (BUILT_IN_LOG10)
1720 CASE_MATHFN (BUILT_IN_LOG1P)
1721 CASE_MATHFN (BUILT_IN_LOG2)
1722 CASE_MATHFN (BUILT_IN_LOGB)
1723 CASE_MATHFN (BUILT_IN_LRINT)
1724 CASE_MATHFN (BUILT_IN_LROUND)
1725 CASE_MATHFN (BUILT_IN_MODF)
1726 CASE_MATHFN (BUILT_IN_NAN)
1727 CASE_MATHFN (BUILT_IN_NANS)
1728 CASE_MATHFN (BUILT_IN_NEARBYINT)
1729 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1730 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1731 CASE_MATHFN (BUILT_IN_POW)
1732 CASE_MATHFN (BUILT_IN_POWI)
1733 CASE_MATHFN (BUILT_IN_POW10)
1734 CASE_MATHFN (BUILT_IN_REMAINDER)
1735 CASE_MATHFN (BUILT_IN_REMQUO)
1736 CASE_MATHFN (BUILT_IN_RINT)
1737 CASE_MATHFN (BUILT_IN_ROUND)
1738 CASE_MATHFN (BUILT_IN_SCALB)
1739 CASE_MATHFN (BUILT_IN_SCALBLN)
1740 CASE_MATHFN (BUILT_IN_SCALBN)
1741 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1742 CASE_MATHFN (BUILT_IN_SIN)
1743 CASE_MATHFN (BUILT_IN_SINCOS)
1744 CASE_MATHFN (BUILT_IN_SINH)
1745 CASE_MATHFN (BUILT_IN_SQRT)
1746 CASE_MATHFN (BUILT_IN_TAN)
1747 CASE_MATHFN (BUILT_IN_TANH)
1748 CASE_MATHFN (BUILT_IN_TGAMMA)
1749 CASE_MATHFN (BUILT_IN_TRUNC)
1750 CASE_MATHFN (BUILT_IN_Y0)
1751 CASE_MATHFN (BUILT_IN_Y1)
1752 CASE_MATHFN (BUILT_IN_YN)
1754 default:
1755 return NULL_TREE;
1758 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1759 return implicit_built_in_decls[fcode];
1760 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1761 return implicit_built_in_decls[fcodef];
1762 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1763 return implicit_built_in_decls[fcodel];
1764 else
1765 return NULL_TREE;
1768 /* If errno must be maintained, expand the RTL to check if the result,
1769 TARGET, of a built-in function call, EXP, is NaN, and if so set
1770 errno to EDOM. */
1772 static void
1773 expand_errno_check (tree exp, rtx target)
1775 rtx lab = gen_label_rtx ();
1777 /* Test the result; if it is NaN, set errno=EDOM because
1778 the argument was not in the domain. */
1779 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1780 0, lab);
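/* In source terms, the RTL emitted here and below behaves roughly like

     if (result != result)
       errno = EDOM;

   since only a NaN compares unequal to itself.  (Illustration only; the
   errno store itself is emitted below, either directly or via a library
   call.)  */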
1782 #ifdef TARGET_EDOM
1783 /* If this built-in doesn't throw an exception, set errno directly. */
1784 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1786 #ifdef GEN_ERRNO_RTX
1787 rtx errno_rtx = GEN_ERRNO_RTX;
1788 #else
1789 rtx errno_rtx
1790 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1791 #endif
1792 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1793 emit_label (lab);
1794 return;
1796 #endif
1798 /* We can't set errno=EDOM directly; let the library call do it.
1799 Pop the arguments right away in case the call gets deleted. */
1800 NO_DEFER_POP;
1801 expand_call (exp, target, 0);
1802 OK_DEFER_POP;
1803 emit_label (lab);
1806 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1807 Return NULL_RTX if a normal call should be emitted rather than expanding
1808 the function in-line. EXP is the expression that is a call to the builtin
1809 function; if convenient, the result should be placed in TARGET.
1810 SUBTARGET may be used as the target for computing one of EXP's operands. */
1812 static rtx
1813 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1815 optab builtin_optab;
1816 rtx op0, insns, before_call;
1817 tree fndecl = get_callee_fndecl (exp);
1818 enum machine_mode mode;
1819 bool errno_set = false;
1820 tree arg, narg;
1822 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1823 return NULL_RTX;
1825 arg = CALL_EXPR_ARG (exp, 0);
1827 switch (DECL_FUNCTION_CODE (fndecl))
1829 CASE_FLT_FN (BUILT_IN_SQRT):
1830 errno_set = ! tree_expr_nonnegative_p (arg);
1831 builtin_optab = sqrt_optab;
1832 break;
1833 CASE_FLT_FN (BUILT_IN_EXP):
1834 errno_set = true; builtin_optab = exp_optab; break;
1835 CASE_FLT_FN (BUILT_IN_EXP10):
1836 CASE_FLT_FN (BUILT_IN_POW10):
1837 errno_set = true; builtin_optab = exp10_optab; break;
1838 CASE_FLT_FN (BUILT_IN_EXP2):
1839 errno_set = true; builtin_optab = exp2_optab; break;
1840 CASE_FLT_FN (BUILT_IN_EXPM1):
1841 errno_set = true; builtin_optab = expm1_optab; break;
1842 CASE_FLT_FN (BUILT_IN_LOGB):
1843 errno_set = true; builtin_optab = logb_optab; break;
1844 CASE_FLT_FN (BUILT_IN_LOG):
1845 errno_set = true; builtin_optab = log_optab; break;
1846 CASE_FLT_FN (BUILT_IN_LOG10):
1847 errno_set = true; builtin_optab = log10_optab; break;
1848 CASE_FLT_FN (BUILT_IN_LOG2):
1849 errno_set = true; builtin_optab = log2_optab; break;
1850 CASE_FLT_FN (BUILT_IN_LOG1P):
1851 errno_set = true; builtin_optab = log1p_optab; break;
1852 CASE_FLT_FN (BUILT_IN_ASIN):
1853 builtin_optab = asin_optab; break;
1854 CASE_FLT_FN (BUILT_IN_ACOS):
1855 builtin_optab = acos_optab; break;
1856 CASE_FLT_FN (BUILT_IN_TAN):
1857 builtin_optab = tan_optab; break;
1858 CASE_FLT_FN (BUILT_IN_ATAN):
1859 builtin_optab = atan_optab; break;
1860 CASE_FLT_FN (BUILT_IN_FLOOR):
1861 builtin_optab = floor_optab; break;
1862 CASE_FLT_FN (BUILT_IN_CEIL):
1863 builtin_optab = ceil_optab; break;
1864 CASE_FLT_FN (BUILT_IN_TRUNC):
1865 builtin_optab = btrunc_optab; break;
1866 CASE_FLT_FN (BUILT_IN_ROUND):
1867 builtin_optab = round_optab; break;
1868 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1869 builtin_optab = nearbyint_optab;
1870 if (flag_trapping_math)
1871 break;
1872 /* Else fallthrough and expand as rint. */
1873 CASE_FLT_FN (BUILT_IN_RINT):
1874 builtin_optab = rint_optab; break;
1875 default:
1876 gcc_unreachable ();
1879 /* Make a suitable register to place result in. */
1880 mode = TYPE_MODE (TREE_TYPE (exp));
1882 if (! flag_errno_math || ! HONOR_NANS (mode))
1883 errno_set = false;
1885 /* Before working hard, check whether the instruction is available. */
1886 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1888 target = gen_reg_rtx (mode);
1890 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1891 need to expand the argument again. This way, we will not perform
1892 side-effects more than once. */
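/* E.g. for sqrt (x++) the increment must happen exactly once, even
   though ARG may be expanded both for the optab attempt here and for
   the fallback library call further down.  (Illustrative example.)  */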
1893 narg = builtin_save_expr (arg);
1894 if (narg != arg)
1896 arg = narg;
1897 exp = build_call_expr (fndecl, 1, arg);
1900 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1902 start_sequence ();
1904 /* Compute into TARGET.
1905 Set TARGET to wherever the result comes back. */
1906 target = expand_unop (mode, builtin_optab, op0, target, 0);
1908 if (target != 0)
1910 if (errno_set)
1911 expand_errno_check (exp, target);
1913 /* Output the entire sequence. */
1914 insns = get_insns ();
1915 end_sequence ();
1916 emit_insn (insns);
1917 return target;
1920 /* If we were unable to expand via the builtin, stop the sequence
1921 (without outputting the insns) and call to the library function
1922 with the stabilized argument list. */
1923 end_sequence ();
1926 before_call = get_last_insn ();
1928 target = expand_call (exp, target, target == const0_rtx);
1930 /* If this is a sqrt operation and we don't care about errno, try to
1931 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1932 This allows the semantics of the libcall to be visible to the RTL
1933 optimizers. */
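/* For example (illustration only), for a DFmode call the note attached
   to the libcall's result-setting insn becomes (sqrt:DF (reg:DF <arg>)),
   so CSE and friends can treat the library result like any other SQRT
   rtx.  */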
1934 if (builtin_optab == sqrt_optab && !errno_set)
1936 /* Search backwards through the insns emitted by expand_call looking
1937 for the instruction with the REG_RETVAL note. */
1938 rtx last = get_last_insn ();
1939 while (last != before_call)
1941 if (find_reg_note (last, REG_RETVAL, NULL))
1943 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1944 /* Check that the REG_EQUAL note is an EXPR_LIST with
1945 two elements, i.e. symbol_ref(sqrt) and the operand. */
1946 if (note
1947 && GET_CODE (note) == EXPR_LIST
1948 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1949 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1950 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1952 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1953 /* Check operand is a register with expected mode. */
1954 if (operand
1955 && REG_P (operand)
1956 && GET_MODE (operand) == mode)
1958 /* Replace the REG_EQUAL note with a SQRT rtx. */
1959 rtx equiv = gen_rtx_SQRT (mode, operand);
1960 set_unique_reg_note (last, REG_EQUAL, equiv);
1963 break;
1965 last = PREV_INSN (last);
1969 return target;
1972 /* Expand a call to the builtin binary math functions (pow and atan2).
1973 Return NULL_RTX if a normal call should be emitted rather than expanding the
1974 function in-line. EXP is the expression that is a call to the builtin
1975 function; if convenient, the result should be placed in TARGET.
1976 SUBTARGET may be used as the target for computing one of EXP's
1977 operands. */
1979 static rtx
1980 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1982 optab builtin_optab;
1983 rtx op0, op1, insns;
1984 int op1_type = REAL_TYPE;
1985 tree fndecl = get_callee_fndecl (exp);
1986 tree arg0, arg1, narg;
1987 enum machine_mode mode;
1988 bool errno_set = true;
1989 bool stable = true;
1991 switch (DECL_FUNCTION_CODE (fndecl))
1993 CASE_FLT_FN (BUILT_IN_SCALBN):
1994 CASE_FLT_FN (BUILT_IN_SCALBLN):
1995 CASE_FLT_FN (BUILT_IN_LDEXP):
1996 op1_type = INTEGER_TYPE;
1997 default:
1998 break;
2001 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2002 return NULL_RTX;
2004 arg0 = CALL_EXPR_ARG (exp, 0);
2005 arg1 = CALL_EXPR_ARG (exp, 1);
2007 switch (DECL_FUNCTION_CODE (fndecl))
2009 CASE_FLT_FN (BUILT_IN_POW):
2010 builtin_optab = pow_optab; break;
2011 CASE_FLT_FN (BUILT_IN_ATAN2):
2012 builtin_optab = atan2_optab; break;
2013 CASE_FLT_FN (BUILT_IN_SCALB):
2014 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2015 return 0;
2016 builtin_optab = scalb_optab; break;
2017 CASE_FLT_FN (BUILT_IN_SCALBN):
2018 CASE_FLT_FN (BUILT_IN_SCALBLN):
2019 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2020 return 0;
2021 /* Fall through... */
2022 CASE_FLT_FN (BUILT_IN_LDEXP):
2023 builtin_optab = ldexp_optab; break;
2024 CASE_FLT_FN (BUILT_IN_FMOD):
2025 builtin_optab = fmod_optab; break;
2026 CASE_FLT_FN (BUILT_IN_REMAINDER):
2027 CASE_FLT_FN (BUILT_IN_DREM):
2028 builtin_optab = remainder_optab; break;
2029 default:
2030 gcc_unreachable ();
2033 /* Make a suitable register to place result in. */
2034 mode = TYPE_MODE (TREE_TYPE (exp));
2036 /* Before working hard, check whether the instruction is available. */
2037 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2038 return NULL_RTX;
2040 target = gen_reg_rtx (mode);
2042 if (! flag_errno_math || ! HONOR_NANS (mode))
2043 errno_set = false;
2045 /* Always stabilize the argument list. */
2046 narg = builtin_save_expr (arg1);
2047 if (narg != arg1)
2049 arg1 = narg;
2050 stable = false;
2052 narg = builtin_save_expr (arg0);
2053 if (narg != arg0)
2055 arg0 = narg;
2056 stable = false;
2059 if (! stable)
2060 exp = build_call_expr (fndecl, 2, arg0, arg1);
2062 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2063 op1 = expand_normal (arg1);
2065 start_sequence ();
2067 /* Compute into TARGET.
2068 Set TARGET to wherever the result comes back. */
2069 target = expand_binop (mode, builtin_optab, op0, op1,
2070 target, 0, OPTAB_DIRECT);
2072 /* If we were unable to expand via the builtin, stop the sequence
2073 (without outputting the insns) and call to the library function
2074 with the stabilized argument list. */
2075 if (target == 0)
2077 end_sequence ();
2078 return expand_call (exp, target, target == const0_rtx);
2081 if (errno_set)
2082 expand_errno_check (exp, target);
2084 /* Output the entire sequence. */
2085 insns = get_insns ();
2086 end_sequence ();
2087 emit_insn (insns);
2089 return target;
2092 /* Expand a call to the builtin sin and cos math functions.
2093 Return NULL_RTX if a normal call should be emitted rather than expanding the
2094 function in-line. EXP is the expression that is a call to the builtin
2095 function; if convenient, the result should be placed in TARGET.
2096 SUBTARGET may be used as the target for computing one of EXP's
2097 operands. */
2099 static rtx
2100 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2102 optab builtin_optab;
2103 rtx op0, insns;
2104 tree fndecl = get_callee_fndecl (exp);
2105 enum machine_mode mode;
2106 tree arg, narg;
2108 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2109 return NULL_RTX;
2111 arg = CALL_EXPR_ARG (exp, 0);
2113 switch (DECL_FUNCTION_CODE (fndecl))
2115 CASE_FLT_FN (BUILT_IN_SIN):
2116 CASE_FLT_FN (BUILT_IN_COS):
2117 builtin_optab = sincos_optab; break;
2118 default:
2119 gcc_unreachable ();
2122 /* Make a suitable register to place result in. */
2123 mode = TYPE_MODE (TREE_TYPE (exp));
2125 /* Check if sincos insn is available, otherwise fallback
2126 to sin or cos insn. */
2127 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2128 switch (DECL_FUNCTION_CODE (fndecl))
2130 CASE_FLT_FN (BUILT_IN_SIN):
2131 builtin_optab = sin_optab; break;
2132 CASE_FLT_FN (BUILT_IN_COS):
2133 builtin_optab = cos_optab; break;
2134 default:
2135 gcc_unreachable ();
2138 /* Before working hard, check whether the instruction is available. */
2139 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2141 target = gen_reg_rtx (mode);
2143 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2144 need to expand the argument again. This way, we will not perform
2145 side-effects more than once. */
2146 narg = save_expr (arg);
2147 if (narg != arg)
2149 arg = narg;
2150 exp = build_call_expr (fndecl, 1, arg);
2153 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2155 start_sequence ();
2157 /* Compute into TARGET.
2158 Set TARGET to wherever the result comes back. */
2159 if (builtin_optab == sincos_optab)
2161 int result;
2163 switch (DECL_FUNCTION_CODE (fndecl))
2165 CASE_FLT_FN (BUILT_IN_SIN):
2166 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2167 break;
2168 CASE_FLT_FN (BUILT_IN_COS):
2169 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2170 break;
2171 default:
2172 gcc_unreachable ();
2174 gcc_assert (result);
2176 else
2178 target = expand_unop (mode, builtin_optab, op0, target, 0);
2181 if (target != 0)
2183 /* Output the entire sequence. */
2184 insns = get_insns ();
2185 end_sequence ();
2186 emit_insn (insns);
2187 return target;
2190 /* If we were unable to expand via the builtin, stop the sequence
2191 (without outputting the insns) and call to the library function
2192 with the stabilized argument list. */
2193 end_sequence ();
2196 target = expand_call (exp, target, target == const0_rtx);
2198 return target;
2201 /* Expand a call to one of the builtin math functions that operate on
2202 a floating point argument and produce an integer result (ilogb, isinf,
2203 isnan, etc).
2204 Return 0 if a normal call should be emitted rather than expanding the
2205 function in-line. EXP is the expression that is a call to the builtin
2206 function; if convenient, the result should be placed in TARGET.
2207 SUBTARGET may be used as the target for computing one of EXP's operands. */
2209 static rtx
2210 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2212 optab builtin_optab;
2213 enum insn_code icode;
2214 rtx op0;
2215 tree fndecl = get_callee_fndecl (exp);
2216 enum machine_mode mode;
2217 bool errno_set = false;
2218 tree arg, narg;
2220 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2221 return NULL_RTX;
2223 arg = CALL_EXPR_ARG (exp, 0);
2225 switch (DECL_FUNCTION_CODE (fndecl))
2227 CASE_FLT_FN (BUILT_IN_ILOGB):
2228 errno_set = true; builtin_optab = ilogb_optab; break;
2229 CASE_FLT_FN (BUILT_IN_ISINF):
2230 builtin_optab = isinf_optab; break;
2231 default:
2232 gcc_unreachable ();
2235 /* There's no easy way to detect the case we need to set EDOM. */
2236 if (flag_errno_math && errno_set)
2237 return NULL_RTX;
2239 /* Optab mode depends on the mode of the input argument. */
2240 mode = TYPE_MODE (TREE_TYPE (arg));
2242 icode = builtin_optab->handlers[(int) mode].insn_code;
2244 /* Before working hard, check whether the instruction is available. */
2245 if (icode != CODE_FOR_nothing)
2247 /* Make a suitable register to place result in. */
2248 if (!target
2249 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2250 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2252 gcc_assert (insn_data[icode].operand[0].predicate
2253 (target, GET_MODE (target)));
2255 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2256 need to expand the argument again. This way, we will not perform
2257 side-effects more than once. */
2258 narg = builtin_save_expr (arg);
2259 if (narg != arg)
2261 arg = narg;
2262 exp = build_call_expr (fndecl, 1, arg);
2265 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2267 if (mode != GET_MODE (op0))
2268 op0 = convert_to_mode (mode, op0, 0);
2270 /* Compute into TARGET.
2271 Set TARGET to wherever the result comes back. */
2272 emit_unop_insn (icode, target, op0, UNKNOWN);
2273 return target;
2276 target = expand_call (exp, target, target == const0_rtx);
2278 return target;
2281 /* Expand a call to the builtin sincos math function.
2282 Return NULL_RTX if a normal call should be emitted rather than expanding the
2283 function in-line. EXP is the expression that is a call to the builtin
2284 function. */
2286 static rtx
2287 expand_builtin_sincos (tree exp)
2289 rtx op0, op1, op2, target1, target2;
2290 enum machine_mode mode;
2291 tree arg, sinp, cosp;
2292 int result;
2294 if (!validate_arglist (exp, REAL_TYPE,
2295 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2296 return NULL_RTX;
2298 arg = CALL_EXPR_ARG (exp, 0);
2299 sinp = CALL_EXPR_ARG (exp, 1);
2300 cosp = CALL_EXPR_ARG (exp, 2);
2302 /* Make a suitable register to place result in. */
2303 mode = TYPE_MODE (TREE_TYPE (arg));
2305 /* Check if sincos insn is available, otherwise emit the call. */
2306 if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2307 return NULL_RTX;
2309 target1 = gen_reg_rtx (mode);
2310 target2 = gen_reg_rtx (mode);
2312 op0 = expand_normal (arg);
2313 op1 = expand_normal (build_fold_indirect_ref (sinp));
2314 op2 = expand_normal (build_fold_indirect_ref (cosp));
2316 /* Compute into target1 and target2.
2317 Set TARGET to wherever the result comes back. */
2318 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2319 gcc_assert (result);
2321 /* Move target1 and target2 to the memory locations indicated
2322 by op1 and op2. */
2323 emit_move_insn (op1, target1);
2324 emit_move_insn (op2, target2);
2326 return const0_rtx;
2329 /* Expand a call to the internal cexpi builtin to the sincos math function.
2330 EXP is the expression that is a call to the builtin function; if convenient,
2331 the result should be placed in TARGET. SUBTARGET may be used as the target
2332 for computing one of EXP's operands. */
2334 static rtx
2335 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2337 tree fndecl = get_callee_fndecl (exp);
2338 tree arg, type;
2339 enum machine_mode mode;
2340 rtx op0, op1, op2;
2342 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2343 return NULL_RTX;
2345 arg = CALL_EXPR_ARG (exp, 0);
2346 type = TREE_TYPE (arg);
2347 mode = TYPE_MODE (TREE_TYPE (arg));
2349 /* Try expanding via a sincos optab, fall back to emitting a libcall
2350 to sincos or cexp. We know sincos or cexp is available because cexpi
2351 is only generated when folding sincos or cexp, or when one of them exists. */
2352 if (sincos_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2354 op1 = gen_reg_rtx (mode);
2355 op2 = gen_reg_rtx (mode);
2357 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2359 /* Compute into op1 and op2. */
2360 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2362 else if (TARGET_HAS_SINCOS)
2364 tree call, fn = NULL_TREE;
2365 tree top1, top2;
2366 rtx op1a, op2a;
2368 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2369 fn = built_in_decls[BUILT_IN_SINCOSF];
2370 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2371 fn = built_in_decls[BUILT_IN_SINCOS];
2372 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2373 fn = built_in_decls[BUILT_IN_SINCOSL];
2374 else
2375 gcc_unreachable ();
2377 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2378 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2379 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2380 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2381 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2382 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2384 /* Make sure not to fold the sincos call again. */
2385 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2386 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2387 call, 3, arg, top1, top2));
2389 else
2391 tree call, fn = NULL_TREE, narg;
2392 tree ctype = build_complex_type (type);
2394 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2395 fn = built_in_decls[BUILT_IN_CEXPF];
2396 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2397 fn = built_in_decls[BUILT_IN_CEXP];
2398 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2399 fn = built_in_decls[BUILT_IN_CEXPL];
2400 else
2401 gcc_unreachable ();
2403 /* If we don't have a decl for cexp, create one. This is the
2404 friendliest fallback if the user calls __builtin_cexpi
2405 on a target without full C99 function support. */
2406 if (fn == NULL_TREE)
2408 tree fntype;
2409 const char *name = NULL;
2411 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2412 name = "cexpf";
2413 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2414 name = "cexp";
2415 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2416 name = "cexpl";
2418 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2419 fn = build_fn_decl (name, fntype);
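/* Build the complex argument 0 + ARG*i, since cexpi (x) is by
   definition cexp (x*i).  */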
2422 narg = fold_build2 (COMPLEX_EXPR, ctype,
2423 build_real (type, dconst0), arg);
2425 /* Make sure not to fold the cexp call again. */
2426 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2427 return expand_expr (build_call_nary (ctype, call, 1, narg),
2428 target, VOIDmode, EXPAND_NORMAL);
2431 /* Now build the proper return type. */
2432 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2433 make_tree (TREE_TYPE (arg), op2),
2434 make_tree (TREE_TYPE (arg), op1)),
2435 target, VOIDmode, EXPAND_NORMAL);
2438 /* Expand a call to one of the builtin rounding functions gcc defines
2439 as an extension (lfloor and lceil). As these are gcc extensions we
2440 do not need to worry about setting errno to EDOM.
2441 If expanding via optab fails, lower expression to (int)(floor(x)).
2442 EXP is the expression that is a call to the builtin function;
2443 if convenient, the result should be placed in TARGET. SUBTARGET may
2444 be used as the target for computing one of EXP's operands. */
2446 static rtx
2447 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2449 convert_optab builtin_optab;
2450 rtx op0, insns, tmp;
2451 tree fndecl = get_callee_fndecl (exp);
2452 enum built_in_function fallback_fn;
2453 tree fallback_fndecl;
2454 enum machine_mode mode;
2455 tree arg, narg;
2457 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2458 gcc_unreachable ();
2460 arg = CALL_EXPR_ARG (exp, 0);
2462 switch (DECL_FUNCTION_CODE (fndecl))
2464 CASE_FLT_FN (BUILT_IN_LCEIL):
2465 CASE_FLT_FN (BUILT_IN_LLCEIL):
2466 builtin_optab = lceil_optab;
2467 fallback_fn = BUILT_IN_CEIL;
2468 break;
2470 CASE_FLT_FN (BUILT_IN_LFLOOR):
2471 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2472 builtin_optab = lfloor_optab;
2473 fallback_fn = BUILT_IN_FLOOR;
2474 break;
2476 default:
2477 gcc_unreachable ();
2480 /* Make a suitable register to place result in. */
2481 mode = TYPE_MODE (TREE_TYPE (exp));
2483 target = gen_reg_rtx (mode);
2485 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2486 need to expand the argument again. This way, we will not perform
2487 side-effects more than once. */
2488 narg = builtin_save_expr (arg);
2489 if (narg != arg)
2491 arg = narg;
2492 exp = build_call_expr (fndecl, 1, arg);
2495 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2497 start_sequence ();
2499 /* Compute into TARGET. */
2500 if (expand_sfix_optab (target, op0, builtin_optab))
2502 /* Output the entire sequence. */
2503 insns = get_insns ();
2504 end_sequence ();
2505 emit_insn (insns);
2506 return target;
2509 /* If we were unable to expand via the builtin, stop the sequence
2510 (without outputting the insns). */
2511 end_sequence ();
2513 /* Fall back to floating point rounding optab. */
2514 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2516 /* For non-C99 targets we may end up without a fallback fndecl here
2517 if the user called __builtin_lfloor directly. In this case emit
2518 a call to the floor/ceil variants nevertheless. This should result
2519 in the best user experience on targets without full C99 support. */
2520 if (fallback_fndecl == NULL_TREE)
2522 tree fntype;
2523 const char *name = NULL;
2525 switch (DECL_FUNCTION_CODE (fndecl))
2527 case BUILT_IN_LCEIL:
2528 case BUILT_IN_LLCEIL:
2529 name = "ceil";
2530 break;
2531 case BUILT_IN_LCEILF:
2532 case BUILT_IN_LLCEILF:
2533 name = "ceilf";
2534 break;
2535 case BUILT_IN_LCEILL:
2536 case BUILT_IN_LLCEILL:
2537 name = "ceill";
2538 break;
2539 case BUILT_IN_LFLOOR:
2540 case BUILT_IN_LLFLOOR:
2541 name = "floor";
2542 break;
2543 case BUILT_IN_LFLOORF:
2544 case BUILT_IN_LLFLOORF:
2545 name = "floorf";
2546 break;
2547 case BUILT_IN_LFLOORL:
2548 case BUILT_IN_LLFLOORL:
2549 name = "floorl";
2550 break;
2551 default:
2552 gcc_unreachable ();
2555 fntype = build_function_type_list (TREE_TYPE (arg),
2556 TREE_TYPE (arg), NULL_TREE);
2557 fallback_fndecl = build_fn_decl (name, fntype);
2560 exp = build_call_expr (fallback_fndecl, 1, arg);
2562 tmp = expand_normal (exp);
2564 /* Truncate the result of floating point optab to integer
2565 via expand_fix (). */
2566 target = gen_reg_rtx (mode);
2567 expand_fix (target, tmp, 0);
2569 return target;
2572 /* Expand a call to one of the builtin math functions doing integer
2573 conversion (lrint).
2574 Return 0 if a normal call should be emitted rather than expanding the
2575 function in-line. EXP is the expression that is a call to the builtin
2576 function; if convenient, the result should be placed in TARGET.
2577 SUBTARGET may be used as the target for computing one of EXP's operands. */
2579 static rtx
2580 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2582 convert_optab builtin_optab;
2583 rtx op0, insns;
2584 tree fndecl = get_callee_fndecl (exp);
2585 tree arg, narg;
2586 enum machine_mode mode;
2588 /* There's no easy way to detect the case we need to set EDOM. */
2589 if (flag_errno_math)
2590 return NULL_RTX;
2592 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2593 gcc_unreachable ();
2595 arg = CALL_EXPR_ARG (exp, 0);
2597 switch (DECL_FUNCTION_CODE (fndecl))
2599 CASE_FLT_FN (BUILT_IN_LRINT):
2600 CASE_FLT_FN (BUILT_IN_LLRINT):
2601 builtin_optab = lrint_optab; break;
2602 CASE_FLT_FN (BUILT_IN_LROUND):
2603 CASE_FLT_FN (BUILT_IN_LLROUND):
2604 builtin_optab = lround_optab; break;
2605 default:
2606 gcc_unreachable ();
2609 /* Make a suitable register to place result in. */
2610 mode = TYPE_MODE (TREE_TYPE (exp));
2612 target = gen_reg_rtx (mode);
2614 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2615 need to expand the argument again. This way, we will not perform
2616 side-effects more than once. */
2617 narg = builtin_save_expr (arg);
2618 if (narg != arg)
2620 arg = narg;
2621 exp = build_call_expr (fndecl, 1, arg);
2624 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2626 start_sequence ();
2628 if (expand_sfix_optab (target, op0, builtin_optab))
2630 /* Output the entire sequence. */
2631 insns = get_insns ();
2632 end_sequence ();
2633 emit_insn (insns);
2634 return target;
2637 /* If we were unable to expand via the builtin, stop the sequence
2638 (without outputting the insns) and call to the library function
2639 with the stabilized argument list. */
2640 end_sequence ();
2642 target = expand_call (exp, target, target == const0_rtx);
2644 return target;
2647 /* To evaluate powi(x,n), the floating point value x raised to the
2648 constant integer exponent n, we use a hybrid algorithm that
2649 combines the "window method" with look-up tables. For an
2650 introduction to exponentiation algorithms and "addition chains",
2651 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2652 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2653 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2654 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2656 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2657 multiplications to inline before calling the system library's pow
2658 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2659 so this default never requires calling pow, powf or powl. */
2661 #ifndef POWI_MAX_MULTS
2662 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2663 #endif
2665 /* The size of the "optimal power tree" lookup table. All
2666 exponents less than this value are simply looked up in the
2667 powi_table below. This threshold is also used to size the
2668 cache of pseudo registers that hold intermediate results. */
2669 #define POWI_TABLE_SIZE 256
2671 /* The size, in bits of the window, used in the "window method"
2672 exponentiation algorithm. This is equivalent to a radix of
2673 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2674 #define POWI_WINDOW_SIZE 3
2676 /* The following table is an efficient representation of an
2677 "optimal power tree". For each value, i, the corresponding
2678 value, j, in the table states that an optimal evaluation
2679 sequence for calculating pow(x,i) can be found by evaluating
2680 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2681 100 integers is given in Knuth's "Seminumerical algorithms". */
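/* For example, powi_table[15] is 9, so x**15 is evaluated as
   x**9 * x**6; chasing the table further gives the chain
   x**2, x**3, x**6, x**9, x**15 -- five multiplications, one fewer
   than the six needed by plain binary (square-and-multiply)
   exponentiation.  */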
2683 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2685 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2686 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2687 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2688 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2689 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2690 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2691 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2692 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2693 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2694 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2695 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2696 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2697 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2698 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2699 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2700 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2701 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2702 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2703 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2704 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2705 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2706 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2707 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2708 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2709 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2710 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2711 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2712 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2713 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2714 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2715 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2716 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2720 /* Return the number of multiplications required to calculate
2721 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2722 subroutine of powi_cost. CACHE is an array indicating
2723 which exponents have already been calculated. */
2725 static int
2726 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2728 /* If we've already calculated this exponent, then this evaluation
2729 doesn't require any additional multiplications. */
2730 if (cache[n])
2731 return 0;
2733 cache[n] = true;
2734 return powi_lookup_cost (n - powi_table[n], cache)
2735 + powi_lookup_cost (powi_table[n], cache) + 1;
2738 /* Return the number of multiplications required to calculate
2739 powi(x,n) for an arbitrary x, given the exponent N. This
2740 function needs to be kept in sync with expand_powi below. */
2742 static int
2743 powi_cost (HOST_WIDE_INT n)
2745 bool cache[POWI_TABLE_SIZE];
2746 unsigned HOST_WIDE_INT digit;
2747 unsigned HOST_WIDE_INT val;
2748 int result;
2750 if (n == 0)
2751 return 0;
2753 /* Ignore the reciprocal when calculating the cost. */
2754 val = (n < 0) ? -n : n;
2756 /* Initialize the exponent cache. */
2757 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2758 cache[1] = true;
2760 result = 0;
2762 while (val >= POWI_TABLE_SIZE)
2764 if (val & 1)
2766 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2767 result += powi_lookup_cost (digit, cache)
2768 + POWI_WINDOW_SIZE + 1;
2769 val >>= POWI_WINDOW_SIZE;
2771 else
2773 val >>= 1;
2774 result++;
2778 return result + powi_lookup_cost (val, cache);
2781 /* Recursive subroutine of expand_powi. This function takes the array,
2782 CACHE, of already calculated exponents and an exponent N and returns
2783 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2785 static rtx
2786 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2788 unsigned HOST_WIDE_INT digit;
2789 rtx target, result;
2790 rtx op0, op1;
2792 if (n < POWI_TABLE_SIZE)
2794 if (cache[n])
2795 return cache[n];
2797 target = gen_reg_rtx (mode);
2798 cache[n] = target;
2800 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2801 op1 = expand_powi_1 (mode, powi_table[n], cache);
2803 else if (n & 1)
2805 target = gen_reg_rtx (mode);
2806 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2807 op0 = expand_powi_1 (mode, n - digit, cache);
2808 op1 = expand_powi_1 (mode, digit, cache);
2810 else
2812 target = gen_reg_rtx (mode);
2813 op0 = expand_powi_1 (mode, n >> 1, cache);
2814 op1 = op0;
2817 result = expand_mult (mode, op0, op1, target, 0);
2818 if (result != target)
2819 emit_move_insn (target, result);
2820 return target;
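/* An illustrative host-side model of the recursion above (not part of
   the compiler, hence kept under "#if 0"): it computes the same
   multiplication sequence expand_powi_1 emits, but on a plain double
   instead of RTL, and omits both the CACHE memoization and the
   negative-exponent handling done by expand_powi below.  */
#if 0
static double
powi_sketch (double x, unsigned HOST_WIDE_INT n)
{
  unsigned HOST_WIDE_INT digit;
  double half;

  if (n == 0)
    return 1.0;
  if (n == 1)
    return x;
  if (n < POWI_TABLE_SIZE)
    /* Optimal split from the power tree: x**n = x**j * x**(n-j).  */
    return powi_sketch (x, n - powi_table[n]) * powi_sketch (x, powi_table[n]);
  if (n & 1)
    {
      /* Window method: peel off the low POWI_WINDOW_SIZE bits.  */
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      return powi_sketch (x, n - digit) * powi_sketch (x, digit);
    }
  /* Even exponent: square x**(n/2).  */
  half = powi_sketch (x, n >> 1);
  return half * half;
}
#endif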
2823 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2824 floating point operand in mode MODE, and N is the exponent. This
2825 function needs to be kept in sync with powi_cost above. */
2827 static rtx
2828 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2830 unsigned HOST_WIDE_INT val;
2831 rtx cache[POWI_TABLE_SIZE];
2832 rtx result;
2834 if (n == 0)
2835 return CONST1_RTX (mode);
2837 val = (n < 0) ? -n : n;
2839 memset (cache, 0, sizeof (cache));
2840 cache[1] = x;
2842 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2844 /* If the original exponent was negative, reciprocate the result. */
2845 if (n < 0)
2846 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2847 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2849 return result;
2852 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2853 a normal call should be emitted rather than expanding the function
2854 in-line. EXP is the expression that is a call to the builtin
2855 function; if convenient, the result should be placed in TARGET. */
2857 static rtx
2858 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2860 tree arg0, arg1;
2861 tree fn, narg0;
2862 tree type = TREE_TYPE (exp);
2863 REAL_VALUE_TYPE cint, c, c2;
2864 HOST_WIDE_INT n;
2865 rtx op, op2;
2866 enum machine_mode mode = TYPE_MODE (type);
2868 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2869 return NULL_RTX;
2871 arg0 = CALL_EXPR_ARG (exp, 0);
2872 arg1 = CALL_EXPR_ARG (exp, 1);
2874 if (TREE_CODE (arg1) != REAL_CST
2875 || TREE_OVERFLOW (arg1))
2876 return expand_builtin_mathfn_2 (exp, target, subtarget);
2878 /* Handle constant exponents. */
2880 /* For integer valued exponents we can expand to an optimal multiplication
2881 sequence using expand_powi. */
2882 c = TREE_REAL_CST (arg1);
2883 n = real_to_integer (&c);
2884 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2885 if (real_identical (&c, &cint)
2886 && ((n >= -1 && n <= 2)
2887 || (flag_unsafe_math_optimizations
2888 && !optimize_size
2889 && powi_cost (n) <= POWI_MAX_MULTS)))
2891 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2892 if (n != 1)
2894 op = force_reg (mode, op);
2895 op = expand_powi (op, mode, n);
2897 return op;
2900 narg0 = builtin_save_expr (arg0);
2902 /* If the exponent is not integer valued, check if it is half of an integer.
2903 In this case we can expand to sqrt (x) * x**(n/2). */
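/* For example, pow (x, 3.5) becomes sqrt (x) * (x * x * x), with the
   integer power expanded by expand_powi below.  (Illustration only.)  */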
2904 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2905 if (fn != NULL_TREE)
2907 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2908 n = real_to_integer (&c2);
2909 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2910 if (real_identical (&c2, &cint)
2911 && ((flag_unsafe_math_optimizations
2912 && !optimize_size
2913 && powi_cost (n/2) <= POWI_MAX_MULTS)
2914 || n == 1))
2916 tree call_expr = build_call_expr (fn, 1, narg0);
2917 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2918 if (n != 1)
2920 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2921 op2 = force_reg (mode, op2);
2922 op2 = expand_powi (op2, mode, abs (n / 2));
2923 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2924 0, OPTAB_LIB_WIDEN);
2925 /* If the original exponent was negative, reciprocate the
2926 result. */
2927 if (n < 0)
2928 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2929 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2931 return op;
2935 /* Check whether the exponent is a third of an integer. In this case
2936 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2937 different from pow (x, 1./3.) due to rounding and behavior
2938 with negative x we need to constrain this transformation to
2939 unsafe math and positive x or finite math. */
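/* For example, pow (x, 5./3.) becomes (cbrt (x) * cbrt (x)) * x,
   i.e. cbrt(x)**(n%3) * x**(n/3) with n == 5.  (Illustration only.)  */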
2940 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2941 if (fn != NULL_TREE
2942 && flag_unsafe_math_optimizations
2943 && (tree_expr_nonnegative_p (arg0)
2944 || !HONOR_NANS (mode)))
2946 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2947 real_round (&c2, mode, &c2);
2948 n = real_to_integer (&c2);
2949 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2950 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2951 real_convert (&c2, mode, &c2);
2952 if (real_identical (&c2, &c)
2953 && ((!optimize_size
2954 && powi_cost (n/3) <= POWI_MAX_MULTS)
2955 || n == 1))
2957 tree call_expr = build_call_expr (fn, 1, narg0);
2958 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2959 if (abs (n) % 3 == 2)
2960 op = expand_simple_binop (mode, MULT, op, op, op,
2961 0, OPTAB_LIB_WIDEN);
2962 if (n != 1)
2964 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2965 op2 = force_reg (mode, op2);
2966 op2 = expand_powi (op2, mode, abs (n / 3));
2967 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2968 0, OPTAB_LIB_WIDEN);
2969 /* If the original exponent was negative, reciprocate the
2970 result. */
2971 if (n < 0)
2972 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2973 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2975 return op;
2979 /* Fall back to optab expansion. */
2980 return expand_builtin_mathfn_2 (exp, target, subtarget);
2983 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2984 a normal call should be emitted rather than expanding the function
2985 in-line. EXP is the expression that is a call to the builtin
2986 function; if convenient, the result should be placed in TARGET. */
2988 static rtx
2989 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
2991 tree arg0, arg1;
2992 rtx op0, op1;
2993 enum machine_mode mode;
2994 enum machine_mode mode2;
2996 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2997 return NULL_RTX;
2999 arg0 = CALL_EXPR_ARG (exp, 0);
3000 arg1 = CALL_EXPR_ARG (exp, 1);
3001 mode = TYPE_MODE (TREE_TYPE (exp));
3003 /* Handle constant power. */
3005 if (TREE_CODE (arg1) == INTEGER_CST
3006 && !TREE_OVERFLOW (arg1))
3008 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3010 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3011 Otherwise, check the number of multiplications required. */
3012 if ((TREE_INT_CST_HIGH (arg1) == 0
3013 || TREE_INT_CST_HIGH (arg1) == -1)
3014 && ((n >= -1 && n <= 2)
3015 || (! optimize_size
3016 && powi_cost (n) <= POWI_MAX_MULTS)))
3018 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3019 op0 = force_reg (mode, op0);
3020 return expand_powi (op0, mode, n);
3024 /* Emit a libcall to libgcc. */
3026 /* Mode of the 2nd argument must match that of an int. */
3027 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3029 if (target == NULL_RTX)
3030 target = gen_reg_rtx (mode);
3032 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3033 if (GET_MODE (op0) != mode)
3034 op0 = convert_to_mode (mode, op0, 0);
3035 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3036 if (GET_MODE (op1) != mode2)
3037 op1 = convert_to_mode (mode2, op1, 0);
3039 target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
3040 target, LCT_CONST_MAKE_BLOCK, mode, 2,
3041 op0, mode, op1, mode2);
3043 return target;
3046 /* Expand expression EXP which is a call to the strlen builtin. Return
3047 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3048 try to get the result in TARGET, if convenient. */
3050 static rtx
3051 expand_builtin_strlen (tree exp, rtx target,
3052 enum machine_mode target_mode)
3054 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3055 return NULL_RTX;
3056 else
3058 rtx pat;
3059 tree len;
3060 tree src = CALL_EXPR_ARG (exp, 0);
3061 rtx result, src_reg, char_rtx, before_strlen;
3062 enum machine_mode insn_mode = target_mode, char_mode;
3063 enum insn_code icode = CODE_FOR_nothing;
3064 int align;
3066 /* If the length can be computed at compile-time, return it. */
3067 len = c_strlen (src, 0);
3068 if (len)
3069 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3071 /* If the length can be computed at compile-time and is a constant
3072 integer, but there are side-effects in src, evaluate
3073 src for side-effects, then return len.
3074 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3075 can be optimized into: i++; x = 3; */
3076 len = c_strlen (src, 1);
3077 if (len && TREE_CODE (len) == INTEGER_CST)
3079 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3080 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3083 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3085 /* If SRC is not a pointer type, don't do this operation inline. */
3086 if (align == 0)
3087 return NULL_RTX;
3089 /* Bail out if we can't compute strlen in the right mode. */
3090 while (insn_mode != VOIDmode)
3092 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
3093 if (icode != CODE_FOR_nothing)
3094 break;
3096 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3098 if (insn_mode == VOIDmode)
3099 return NULL_RTX;
3101 /* Make a place to write the result of the instruction. */
3102 result = target;
3103 if (! (result != 0
3104 && REG_P (result)
3105 && GET_MODE (result) == insn_mode
3106 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3107 result = gen_reg_rtx (insn_mode);
3109 /* Make a place to hold the source address. We will not expand
3110 the actual source until we are sure that the expansion will
3111 not fail -- there are trees that cannot be expanded twice. */
3112 src_reg = gen_reg_rtx (Pmode);
3114 /* Mark the beginning of the strlen sequence so we can emit the
3115 source operand later. */
3116 before_strlen = get_last_insn ();
3118 char_rtx = const0_rtx;
3119 char_mode = insn_data[(int) icode].operand[2].mode;
3120 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3121 char_mode))
3122 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3124 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3125 char_rtx, GEN_INT (align));
3126 if (! pat)
3127 return NULL_RTX;
3128 emit_insn (pat);
3130 /* Now that we are assured of success, expand the source. */
3131 start_sequence ();
3132 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3133 if (pat != src_reg)
3134 emit_move_insn (src_reg, pat);
3135 pat = get_insns ();
3136 end_sequence ();
3138 if (before_strlen)
3139 emit_insn_after (pat, before_strlen);
3140 else
3141 emit_insn_before (pat, get_insns ());
3143 /* Return the value in the proper mode for this function. */
3144 if (GET_MODE (result) == target_mode)
3145 target = result;
3146 else if (target != 0)
3147 convert_move (target, result, 0);
3148 else
3149 target = convert_to_mode (target_mode, result, 0);
3151 return target;
3155 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed; the
3156 caller should emit a normal call, otherwise try to get the result
3157 in TARGET, if convenient (and in mode MODE if that's convenient). */
3159 static rtx
3160 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3162 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3164 tree type = TREE_TYPE (exp);
3165 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3166 CALL_EXPR_ARG (exp, 1), type);
3167 if (result)
3168 return expand_expr (result, target, mode, EXPAND_NORMAL);
3170 return NULL_RTX;
3173 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3174 caller should emit a normal call, otherwise try to get the result
3175 in TARGET, if convenient (and in mode MODE if that's convenient). */
3177 static rtx
3178 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3180 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3182 tree type = TREE_TYPE (exp);
3183 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3184 CALL_EXPR_ARG (exp, 1), type);
3185 if (result)
3186 return expand_expr (result, target, mode, EXPAND_NORMAL);
3188 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3190 return NULL_RTX;
3193 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3194 caller should emit a normal call, otherwise try to get the result
3195 in TARGET, if convenient (and in mode MODE if that's convenient). */
3197 static rtx
3198 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3200 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3202 tree type = TREE_TYPE (exp);
3203 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3204 CALL_EXPR_ARG (exp, 1), type);
3205 if (result)
3206 return expand_expr (result, target, mode, EXPAND_NORMAL);
3208 return NULL_RTX;
3211 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3212 caller should emit a normal call, otherwise try to get the result
3213 in TARGET, if convenient (and in mode MODE if that's convenient). */
3215 static rtx
3216 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3218 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3220 tree type = TREE_TYPE (exp);
3221 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3222 CALL_EXPR_ARG (exp, 1), type);
3223 if (result)
3224 return expand_expr (result, target, mode, EXPAND_NORMAL);
3226 return NULL_RTX;
3229 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3230 bytes from constant string DATA + OFFSET and return it as target
3231 constant. */
3233 static rtx
3234 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3235 enum machine_mode mode)
3237 const char *str = (const char *) data;
3239 gcc_assert (offset >= 0
3240 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3241 <= strlen (str) + 1));
3243 return c_readstr (str + offset, mode);
3246 /* Expand a call EXP to the memcpy builtin.
3247 Return NULL_RTX if we failed; the caller should emit a normal call,
3248 otherwise try to get the result in TARGET, if convenient (and in
3249 mode MODE if that's convenient). */
3251 static rtx
3252 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3254 tree fndecl = get_callee_fndecl (exp);
3256 if (!validate_arglist (exp,
3257 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3258 return NULL_RTX;
3259 else
3261 tree dest = CALL_EXPR_ARG (exp, 0);
3262 tree src = CALL_EXPR_ARG (exp, 1);
3263 tree len = CALL_EXPR_ARG (exp, 2);
3264 const char *src_str;
3265 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3266 unsigned int dest_align
3267 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3268 rtx dest_mem, src_mem, dest_addr, len_rtx;
3269 tree result = fold_builtin_memory_op (dest, src, len,
3270 TREE_TYPE (TREE_TYPE (fndecl)),
3271 false, /*endp=*/0);
3272 HOST_WIDE_INT expected_size = -1;
3273 unsigned int expected_align = 0;
3275 if (result)
3277 while (TREE_CODE (result) == COMPOUND_EXPR)
3279 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3280 EXPAND_NORMAL);
3281 result = TREE_OPERAND (result, 1);
3283 return expand_expr (result, target, mode, EXPAND_NORMAL);
3286 /* If DEST is not a pointer type, call the normal function. */
3287 if (dest_align == 0)
3288 return NULL_RTX;
3290 /* If SRC is not a pointer type, don't do this
3291 operation in-line. */
3292 if (src_align == 0)
3293 return NULL_RTX;
3295 stringop_block_profile (exp, &expected_align, &expected_size);
3296 if (expected_align < dest_align)
3297 expected_align = dest_align;
3298 dest_mem = get_memory_rtx (dest, len);
3299 set_mem_align (dest_mem, dest_align);
3300 len_rtx = expand_normal (len);
3301 src_str = c_getstr (src);
3303 /* If SRC is a string constant and block move would be done
3304 by pieces, we can avoid loading the string from memory
3305 and only store the computed constants. */
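/* For instance, memcpy (p, "abc", 4) can be emitted as a single
   word-sized store of the constant bytes 'a', 'b', 'c', '\0' rather
   than a load from the string's memory.  (Illustrative example.)  */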
3306 if (src_str
3307 && GET_CODE (len_rtx) == CONST_INT
3308 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3309 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3310 (void *) src_str, dest_align))
3312 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3313 builtin_memcpy_read_str,
3314 (void *) src_str, dest_align, 0);
3315 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3316 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3317 return dest_mem;
3320 src_mem = get_memory_rtx (src, len);
3321 set_mem_align (src_mem, src_align);
3323 /* Copy word part most expediently. */
3324 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3325 CALL_EXPR_TAILCALL (exp)
3326 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3327 expected_align, expected_size);
3329 if (dest_addr == 0)
3331 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3332 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3334 return dest_addr;
3338 /* Expand a call EXP to the mempcpy builtin.
3339 Return NULL_RTX if we failed; the caller should emit a normal call,
3340 otherwise try to get the result in TARGET, if convenient (and in
3341 mode MODE if that's convenient). If ENDP is 0 return the
3342 destination pointer, if ENDP is 1 return the end pointer ala
3343 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3344 stpcpy. */
3346 static rtx
3347 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3349 if (!validate_arglist (exp,
3350 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3351 return NULL_RTX;
3352 else
3354 tree dest = CALL_EXPR_ARG (exp, 0);
3355 tree src = CALL_EXPR_ARG (exp, 1);
3356 tree len = CALL_EXPR_ARG (exp, 2);
3357 return expand_builtin_mempcpy_args (dest, src, len,
3358 TREE_TYPE (exp),
3359 target, mode, /*endp=*/ 1);
3363 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3364 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3365 so that this can also be called without constructing an actual CALL_EXPR.
3366 TYPE is the return type of the call. The other arguments and return value
3367 are the same as for expand_builtin_mempcpy. */
3369 static rtx
3370 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3371 rtx target, enum machine_mode mode, int endp)
3373 /* If return value is ignored, transform mempcpy into memcpy. */
3374 if (target == const0_rtx)
3376 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3378 if (!fn)
3379 return NULL_RTX;
3381 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3382 target, mode, EXPAND_NORMAL);
3384 else
3386 const char *src_str;
3387 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3388 unsigned int dest_align
3389 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3390 rtx dest_mem, src_mem, len_rtx;
3391 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3393 if (result)
3395 while (TREE_CODE (result) == COMPOUND_EXPR)
3397 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3398 EXPAND_NORMAL);
3399 result = TREE_OPERAND (result, 1);
3401 return expand_expr (result, target, mode, EXPAND_NORMAL);
3404 /* If either SRC or DEST is not a pointer type, don't do this
3405 operation in-line. */
3406 if (dest_align == 0 || src_align == 0)
3407 return NULL_RTX;
3409 /* If LEN is not constant, call the normal function. */
3410 if (! host_integerp (len, 1))
3411 return NULL_RTX;
3413 len_rtx = expand_normal (len);
3414 src_str = c_getstr (src);
3416 /* If SRC is a string constant and block move would be done
3417 by pieces, we can avoid loading the string from memory
3418 and only store the computed constants. */
3419 if (src_str
3420 && GET_CODE (len_rtx) == CONST_INT
3421 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3422 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3423 (void *) src_str, dest_align))
3425 dest_mem = get_memory_rtx (dest, len);
3426 set_mem_align (dest_mem, dest_align);
3427 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3428 builtin_memcpy_read_str,
3429 (void *) src_str, dest_align, endp);
3430 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3431 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3432 return dest_mem;
3435 if (GET_CODE (len_rtx) == CONST_INT
3436 && can_move_by_pieces (INTVAL (len_rtx),
3437 MIN (dest_align, src_align)))
3439 dest_mem = get_memory_rtx (dest, len);
3440 set_mem_align (dest_mem, dest_align);
3441 src_mem = get_memory_rtx (src, len);
3442 set_mem_align (src_mem, src_align);
3443 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3444 MIN (dest_align, src_align), endp);
3445 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3446 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3447 return dest_mem;
3450 return NULL_RTX;
3454 /* Expand expression EXP, which is a call to the memmove builtin. Return
3455 NULL_RTX if we failed; the caller should emit a normal call. */
3457 static rtx
3458 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3460 if (!validate_arglist (exp,
3461 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3462 return NULL_RTX;
3463 else
3465 tree dest = CALL_EXPR_ARG (exp, 0);
3466 tree src = CALL_EXPR_ARG (exp, 1);
3467 tree len = CALL_EXPR_ARG (exp, 2);
3468 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3469 target, mode, ignore);
3473 /* Helper function to do the actual work for expand_builtin_memmove. The
3474 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3475 so that this can also be called without constructing an actual CALL_EXPR.
3476 TYPE is the return type of the call. The other arguments and return value
3477 are the same as for expand_builtin_memmove. */
3479 static rtx
3480 expand_builtin_memmove_args (tree dest, tree src, tree len,
3481 tree type, rtx target, enum machine_mode mode,
3482 int ignore)
3484 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3486 if (result)
3488 while (TREE_CODE (result) == COMPOUND_EXPR)
3490 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3491 EXPAND_NORMAL);
3492 result = TREE_OPERAND (result, 1);
3494 return expand_expr (result, target, mode, EXPAND_NORMAL);
3497 /* Otherwise, call the normal function. */
3498 return NULL_RTX;
3501 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3502 NULL_RTX if we failed; the caller should emit a normal call. */
3504 static rtx
3505 expand_builtin_bcopy (tree exp, int ignore)
3507 tree type = TREE_TYPE (exp);
3508 tree src, dest, size;
3510 if (!validate_arglist (exp,
3511 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3512 return NULL_RTX;
3514 src = CALL_EXPR_ARG (exp, 0);
3515 dest = CALL_EXPR_ARG (exp, 1);
3516 size = CALL_EXPR_ARG (exp, 2);
3518 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3519 This is done this way so that if it isn't expanded inline, we fall
3520 back to calling bcopy instead of memmove. */
3521 return expand_builtin_memmove_args (dest, src,
3522 fold_convert (sizetype, size),
3523 type, const0_rtx, VOIDmode,
3524 ignore);
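/* The rewrite described above, at the source level (a sketch; bcopy is
   the legacy BSD interface, memmove the ISO C replacement):  */
#include <string.h>

static void
bcopy_as_memmove_sketch (const void *src, void *dst, int n)
{
  /* bcopy (src, dst, n);  is expanded as if it had been written:  */
  memmove (dst, src, (size_t) n);
}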
3527 #ifndef HAVE_movstr
3528 # define HAVE_movstr 0
3529 # define CODE_FOR_movstr CODE_FOR_nothing
3530 #endif
3532 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3533 we failed; the caller should emit a normal call, otherwise try to
3534 get the result in TARGET, if convenient. If ENDP is 0 return the
3535 destination pointer, if ENDP is 1 return the end pointer ala
3536 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3537 stpcpy. */
3539 static rtx
3540 expand_movstr (tree dest, tree src, rtx target, int endp)
3542 rtx end;
3543 rtx dest_mem;
3544 rtx src_mem;
3545 rtx insn;
3546 const struct insn_data * data;
3548 if (!HAVE_movstr)
3549 return NULL_RTX;
3551 dest_mem = get_memory_rtx (dest, NULL);
3552 src_mem = get_memory_rtx (src, NULL);
3553 if (!endp)
3555 target = force_reg (Pmode, XEXP (dest_mem, 0));
3556 dest_mem = replace_equiv_address (dest_mem, target);
3557 end = gen_reg_rtx (Pmode);
3559 else
3561 if (target == 0 || target == const0_rtx)
3563 end = gen_reg_rtx (Pmode);
3564 if (target == 0)
3565 target = end;
3567 else
3568 end = target;
3571 data = insn_data + CODE_FOR_movstr;
3573 if (data->operand[0].mode != VOIDmode)
3574 end = gen_lowpart (data->operand[0].mode, end);
3576 insn = data->genfun (end, dest_mem, src_mem);
3578 gcc_assert (insn);
3580 emit_insn (insn);
3582 /* movstr is supposed to set end to the address of the NUL
3583 terminator. If the caller requested a mempcpy-like return value,
3584 adjust it. */
3585 if (endp == 1 && target != const0_rtx)
3587 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3588 emit_move_insn (target, force_operand (tem, NULL_RTX));
3591 return target;
3594 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3595 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3596 try to get the result in TARGET, if convenient (and in mode MODE if that's
3597 convenient). */
3599 static rtx
3600 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3602 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3604 tree dest = CALL_EXPR_ARG (exp, 0);
3605 tree src = CALL_EXPR_ARG (exp, 1);
3606 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3608 return NULL_RTX;
3611 /* Helper function to do the actual work for expand_builtin_strcpy. The
3612 arguments to the builtin_strcpy call DEST and SRC are broken out
3613 so that this can also be called without constructing an actual CALL_EXPR.
3614 The other arguments and return value are the same as for
3615 expand_builtin_strcpy. */
3617 static rtx
3618 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3619 rtx target, enum machine_mode mode)
3621 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3622 if (result)
3623 return expand_expr (result, target, mode, EXPAND_NORMAL);
3624 return expand_movstr (dest, src, target, /*endp=*/0);
3628 /* Expand a call EXP to the stpcpy builtin.
3629 Return NULL_RTX if we failed; the caller should emit a normal call,
3630 otherwise try to get the result in TARGET, if convenient (and in
3631 mode MODE if that's convenient). */
3633 static rtx
3634 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3636 tree dst, src;
3638 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3639 return NULL_RTX;
3641 dst = CALL_EXPR_ARG (exp, 0);
3642 src = CALL_EXPR_ARG (exp, 1);
3644 /* If return value is ignored, transform stpcpy into strcpy. */
3645 if (target == const0_rtx)
3647 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3648 if (!fn)
3649 return NULL_RTX;
3651 return expand_expr (build_call_expr (fn, 2, dst, src),
3652 target, mode, EXPAND_NORMAL);
3654 else
3656 tree len, lenp1;
3657 rtx ret;
3659 /* Ensure we get an actual string whose length can be evaluated at
3660 compile-time, not an expression containing a string. This is
3661 because the latter will potentially produce pessimized code
3662 when used to produce the return value. */
3663 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3664 return expand_movstr (dst, src, target, /*endp=*/2);
3666 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3667 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3668 target, mode, /*endp=*/2);
3670 if (ret)
3671 return ret;
3673 if (TREE_CODE (len) == INTEGER_CST)
3675 rtx len_rtx = expand_normal (len);
3677 if (GET_CODE (len_rtx) == CONST_INT)
3679 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3680 dst, src, target, mode);
3682 if (ret)
3684 if (! target)
3686 if (mode != VOIDmode)
3687 target = gen_reg_rtx (mode);
3688 else
3689 target = gen_reg_rtx (GET_MODE (ret));
3691 if (GET_MODE (target) != GET_MODE (ret))
3692 ret = gen_lowpart (GET_MODE (target), ret);
3694 ret = plus_constant (ret, INTVAL (len_rtx));
3695 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3696 gcc_assert (ret);
3698 return target;
3703 return expand_movstr (dst, src, target, /*endp=*/2);
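/* A sketch of the two rewrites used above: with the result ignored,
   stpcpy degenerates to strcpy; with a constant source string, it is
   emitted as mempcpy over strlen + 1 bytes, whose return value already
   points one past the copied NUL (ENDP == 2 then subtracts one).
   Assumes a GNU libc providing mempcpy:  */
#define _GNU_SOURCE
#include <string.h>

static char *
stpcpy_rewrites_sketch (char *dst)
{
  strcpy (dst, "abc");                          /* stpcpy (dst, "abc") with
                                                   the result unused        */
  return (char *) mempcpy (dst, "abc", 4) - 1;  /* stpcpy (dst, "abc"): the
                                                   3 chars plus the NUL are
                                                   copied, and the result
                                                   points at that NUL       */
}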
3707 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3708 bytes from constant string DATA + OFFSET and return it as target
3709 constant. */
3711 static rtx
3712 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3713 enum machine_mode mode)
3715 const char *str = (const char *) data;
3717 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3718 return const0_rtx;
3720 return c_readstr (str + offset, mode);
3723 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3724 NULL_RTX if we failed; the caller should emit a normal call. */
3726 static rtx
3727 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3729 tree fndecl = get_callee_fndecl (exp);
3731 if (validate_arglist (exp,
3732 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3734 tree dest = CALL_EXPR_ARG (exp, 0);
3735 tree src = CALL_EXPR_ARG (exp, 1);
3736 tree len = CALL_EXPR_ARG (exp, 2);
3737 tree slen = c_strlen (src, 1);
3738 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3740 if (result)
3742 while (TREE_CODE (result) == COMPOUND_EXPR)
3744 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3745 EXPAND_NORMAL);
3746 result = TREE_OPERAND (result, 1);
3748 return expand_expr (result, target, mode, EXPAND_NORMAL);
3751 /* We must be passed a constant len and src parameter. */
3752 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3753 return NULL_RTX;
3755 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3757 /* We're required to pad with trailing zeros if the requested
3758 len is greater than strlen(s2)+1. In that case try to
3759 use store_by_pieces; if it fails, punt.  */
3760 if (tree_int_cst_lt (slen, len))
3762 unsigned int dest_align
3763 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3764 const char *p = c_getstr (src);
3765 rtx dest_mem;
3767 if (!p || dest_align == 0 || !host_integerp (len, 1)
3768 || !can_store_by_pieces (tree_low_cst (len, 1),
3769 builtin_strncpy_read_str,
3770 (void *) p, dest_align))
3771 return NULL_RTX;
3773 dest_mem = get_memory_rtx (dest, len);
3774 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3775 builtin_strncpy_read_str,
3776 (void *) p, dest_align, 0);
3777 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3778 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3779 return dest_mem;
3782 return NULL_RTX;
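/* The semantics the expander above has to preserve when LEN exceeds
   strlen (SRC) + 1: the destination is NUL-padded out to LEN bytes,
   which is why builtin_strncpy_read_str returns zeros past the end of
   the constant string.  A minimal sketch:  */
#include <string.h>

static void
strncpy_padding_sketch (void)
{
  char buf[8];
  strncpy (buf, "ab", 6);   /* buf[0..5] == 'a','b','\0','\0','\0','\0';
                               the four trailing NULs are required padding */
  (void) buf;
}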
3785 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3786 bytes from constant string DATA + OFFSET and return it as target
3787 constant. */
3789 static rtx
3790 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3791 enum machine_mode mode)
3793 const char *c = (const char *) data;
3794 char *p = alloca (GET_MODE_SIZE (mode));
3796 memset (p, *c, GET_MODE_SIZE (mode));
3798 return c_readstr (p, mode);
3801 /* Callback routine for store_by_pieces. Return the RTL of a register
3802 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3803 char value given in the RTL register data. For example, if mode is
3804 4 bytes wide, return the RTL for 0x01010101*data. */
3806 static rtx
3807 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3808 enum machine_mode mode)
3810 rtx target, coeff;
3811 size_t size;
3812 char *p;
3814 size = GET_MODE_SIZE (mode);
3815 if (size == 1)
3816 return (rtx) data;
3818 p = alloca (size);
3819 memset (p, 1, size);
3820 coeff = c_readstr (p, mode);
3822 target = convert_to_mode (mode, (rtx) data, 1);
3823 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3824 return force_reg (mode, target);
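/* The multiplication trick used above, written out at the source level:
   multiplying an unsigned byte by 0x01010101 replicates it into every
   byte of a 32-bit word.  Wider modes use the analogous 0x0101...01
   constant that c_readstr builds from the all-ones buffer.  A sketch:  */
#include <stdint.h>

static uint32_t
replicate_byte_sketch (unsigned char c)
{
  return (uint32_t) c * 0x01010101u;   /* e.g. 0xab -> 0xabababab */
}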
3827 /* Expand expression EXP, which is a call to the memset builtin. Return
3828 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3829 try to get the result in TARGET, if convenient (and in mode MODE if that's
3830 convenient). */
3832 static rtx
3833 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3835 if (!validate_arglist (exp,
3836 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3837 return NULL_RTX;
3838 else
3840 tree dest = CALL_EXPR_ARG (exp, 0);
3841 tree val = CALL_EXPR_ARG (exp, 1);
3842 tree len = CALL_EXPR_ARG (exp, 2);
3843 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3847 /* Helper function to do the actual work for expand_builtin_memset. The
3848 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3849 so that this can also be called without constructing an actual CALL_EXPR.
3850 The other arguments and return value are the same as for
3851 expand_builtin_memset. */
3853 static rtx
3854 expand_builtin_memset_args (tree dest, tree val, tree len,
3855 rtx target, enum machine_mode mode, tree orig_exp)
3857 tree fndecl, fn;
3858 enum built_in_function fcode;
3859 char c;
3860 unsigned int dest_align;
3861 rtx dest_mem, dest_addr, len_rtx;
3862 HOST_WIDE_INT expected_size = -1;
3863 unsigned int expected_align = 0;
3865 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3867 /* If DEST is not a pointer type, don't do this operation in-line. */
3868 if (dest_align == 0)
3869 return NULL_RTX;
3871 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3872 if (expected_align < dest_align)
3873 expected_align = dest_align;
3875 /* If the LEN parameter is zero, return DEST. */
3876 if (integer_zerop (len))
3878 /* Evaluate and ignore VAL in case it has side-effects. */
3879 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3880 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3883 /* Stabilize the arguments in case we fail. */
3884 dest = builtin_save_expr (dest);
3885 val = builtin_save_expr (val);
3886 len = builtin_save_expr (len);
3888 len_rtx = expand_normal (len);
3889 dest_mem = get_memory_rtx (dest, len);
3891 if (TREE_CODE (val) != INTEGER_CST)
3893 rtx val_rtx;
3895 val_rtx = expand_normal (val);
3896 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3897 val_rtx, 0);
3899 /* Assume that we can memset by pieces if we can store
3900 * the coefficients by pieces (in the required modes).
3901 * We can't pass builtin_memset_gen_str as that emits RTL. */
3902 c = 1;
3903 if (host_integerp (len, 1)
3904 && !(optimize_size && tree_low_cst (len, 1) > 1)
3905 && can_store_by_pieces (tree_low_cst (len, 1),
3906 builtin_memset_read_str, &c, dest_align))
3908 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3909 val_rtx);
3910 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3911 builtin_memset_gen_str, val_rtx, dest_align, 0);
3913 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3914 dest_align, expected_align,
3915 expected_size))
3916 goto do_libcall;
3918 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3919 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3920 return dest_mem;
3923 if (target_char_cast (val, &c))
3924 goto do_libcall;
3926 if (c)
3928 if (host_integerp (len, 1)
3929 && !(optimize_size && tree_low_cst (len, 1) > 1)
3930 && can_store_by_pieces (tree_low_cst (len, 1),
3931 builtin_memset_read_str, &c, dest_align))
3932 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3933 builtin_memset_read_str, &c, dest_align, 0);
3934 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3935 dest_align, expected_align,
3936 expected_size))
3937 goto do_libcall;
3939 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3940 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3941 return dest_mem;
3944 set_mem_align (dest_mem, dest_align);
3945 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3946 CALL_EXPR_TAILCALL (orig_exp)
3947 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3948 expected_align, expected_size);
3950 if (dest_addr == 0)
3952 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3953 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3956 return dest_addr;
3958 do_libcall:
3959 fndecl = get_callee_fndecl (orig_exp);
3960 fcode = DECL_FUNCTION_CODE (fndecl);
3961 if (fcode == BUILT_IN_MEMSET)
3962 fn = build_call_expr (fndecl, 3, dest, val, len);
3963 else if (fcode == BUILT_IN_BZERO)
3964 fn = build_call_expr (fndecl, 2, dest, len);
3965 else
3966 gcc_unreachable ();
3967 if (TREE_CODE (fn) == CALL_EXPR)
3968 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3969 return expand_call (fn, target, target == const0_rtx);
3972 /* Expand expression EXP, which is a call to the bzero builtin. Return
3973 NULL_RTX if we failed; the caller should emit a normal call. */
3975 static rtx
3976 expand_builtin_bzero (tree exp)
3978 tree dest, size;
3980 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3981 return NULL_RTX;
3983 dest = CALL_EXPR_ARG (exp, 0);
3984 size = CALL_EXPR_ARG (exp, 1);
3986 /* New argument list transforming bzero(ptr x, int y) to
3987 memset(ptr x, int 0, size_t y). This is done this way
3988 so that if it isn't expanded inline, we fall back to
3989 calling bzero instead of memset. */
3991 return expand_builtin_memset_args (dest, integer_zero_node,
3992 fold_convert (sizetype, size),
3993 const0_rtx, VOIDmode, exp);
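/* The rewrite described above, at the source level (a sketch; bzero is
   the legacy BSD interface):  */
#include <string.h>

static void
bzero_as_memset_sketch (void *ptr, int n)
{
  /* bzero (ptr, n);  is expanded as if it had been written:  */
  memset (ptr, 0, (size_t) n);
}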
3996 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
3997 caller should emit a normal call, otherwise try to get the result
3998 in TARGET, if convenient (and in mode MODE if that's convenient). */
4000 static rtx
4001 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4003 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4004 INTEGER_TYPE, VOID_TYPE))
4006 tree type = TREE_TYPE (exp);
4007 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4008 CALL_EXPR_ARG (exp, 1),
4009 CALL_EXPR_ARG (exp, 2), type);
4010 if (result)
4011 return expand_expr (result, target, mode, EXPAND_NORMAL);
4013 return NULL_RTX;
4016 /* Expand expression EXP, which is a call to the memcmp built-in function.
4017 Return NULL_RTX if we failed and the
4018 caller should emit a normal call, otherwise try to get the result in
4019 TARGET, if convenient (and in mode MODE, if that's convenient). */
4021 static rtx
4022 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4024 if (!validate_arglist (exp,
4025 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4026 return NULL_RTX;
4027 else
4029 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4030 CALL_EXPR_ARG (exp, 1),
4031 CALL_EXPR_ARG (exp, 2));
4032 if (result)
4033 return expand_expr (result, target, mode, EXPAND_NORMAL);
4036 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4038 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4039 rtx result;
4040 rtx insn;
4041 tree arg1 = CALL_EXPR_ARG (exp, 0);
4042 tree arg2 = CALL_EXPR_ARG (exp, 1);
4043 tree len = CALL_EXPR_ARG (exp, 2);
4045 int arg1_align
4046 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4047 int arg2_align
4048 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4049 enum machine_mode insn_mode;
4051 #ifdef HAVE_cmpmemsi
4052 if (HAVE_cmpmemsi)
4053 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4054 else
4055 #endif
4056 #ifdef HAVE_cmpstrnsi
4057 if (HAVE_cmpstrnsi)
4058 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4059 else
4060 #endif
4061 return NULL_RTX;
4063 /* If we don't have POINTER_TYPE, call the function. */
4064 if (arg1_align == 0 || arg2_align == 0)
4065 return NULL_RTX;
4067 /* Make a place to write the result of the instruction. */
4068 result = target;
4069 if (! (result != 0
4070 && REG_P (result) && GET_MODE (result) == insn_mode
4071 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4072 result = gen_reg_rtx (insn_mode);
4074 arg1_rtx = get_memory_rtx (arg1, len);
4075 arg2_rtx = get_memory_rtx (arg2, len);
4076 arg3_rtx = expand_normal (len);
4078 /* Set MEM_SIZE as appropriate. */
4079 if (GET_CODE (arg3_rtx) == CONST_INT)
4081 set_mem_size (arg1_rtx, arg3_rtx);
4082 set_mem_size (arg2_rtx, arg3_rtx);
4085 #ifdef HAVE_cmpmemsi
4086 if (HAVE_cmpmemsi)
4087 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4088 GEN_INT (MIN (arg1_align, arg2_align)));
4089 else
4090 #endif
4091 #ifdef HAVE_cmpstrnsi
4092 if (HAVE_cmpstrnsi)
4093 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4094 GEN_INT (MIN (arg1_align, arg2_align)));
4095 else
4096 #endif
4097 gcc_unreachable ();
4099 if (insn)
4100 emit_insn (insn);
4101 else
4102 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
4103 TYPE_MODE (integer_type_node), 3,
4104 XEXP (arg1_rtx, 0), Pmode,
4105 XEXP (arg2_rtx, 0), Pmode,
4106 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4107 TYPE_UNSIGNED (sizetype)),
4108 TYPE_MODE (sizetype));
4110 /* Return the value in the proper mode for this function. */
4111 mode = TYPE_MODE (TREE_TYPE (exp));
4112 if (GET_MODE (result) == mode)
4113 return result;
4114 else if (target != 0)
4116 convert_move (target, result, 0);
4117 return target;
4119 else
4120 return convert_to_mode (mode, result, 0);
4122 #endif
4124 return NULL_RTX;
4127 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4128 if we failed; the caller should emit a normal call, otherwise try to get
4129 the result in TARGET, if convenient. */
4131 static rtx
4132 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4134 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4135 return NULL_RTX;
4136 else
4138 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4139 CALL_EXPR_ARG (exp, 1));
4140 if (result)
4141 return expand_expr (result, target, mode, EXPAND_NORMAL);
4144 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4145 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4146 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4148 rtx arg1_rtx, arg2_rtx;
4149 rtx result, insn = NULL_RTX;
4150 tree fndecl, fn;
4151 tree arg1 = CALL_EXPR_ARG (exp, 0);
4152 tree arg2 = CALL_EXPR_ARG (exp, 1);
4154 int arg1_align
4155 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4156 int arg2_align
4157 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4159 /* If we don't have POINTER_TYPE, call the function. */
4160 if (arg1_align == 0 || arg2_align == 0)
4161 return NULL_RTX;
4163 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4164 arg1 = builtin_save_expr (arg1);
4165 arg2 = builtin_save_expr (arg2);
4167 arg1_rtx = get_memory_rtx (arg1, NULL);
4168 arg2_rtx = get_memory_rtx (arg2, NULL);
4170 #ifdef HAVE_cmpstrsi
4171 /* Try to call cmpstrsi. */
4172 if (HAVE_cmpstrsi)
4174 enum machine_mode insn_mode
4175 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4177 /* Make a place to write the result of the instruction. */
4178 result = target;
4179 if (! (result != 0
4180 && REG_P (result) && GET_MODE (result) == insn_mode
4181 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4182 result = gen_reg_rtx (insn_mode);
4184 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4185 GEN_INT (MIN (arg1_align, arg2_align)));
4187 #endif
4188 #ifdef HAVE_cmpstrnsi
4189 /* Try to determine at least one length and call cmpstrnsi. */
4190 if (!insn && HAVE_cmpstrnsi)
4192 tree len;
4193 rtx arg3_rtx;
4195 enum machine_mode insn_mode
4196 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4197 tree len1 = c_strlen (arg1, 1);
4198 tree len2 = c_strlen (arg2, 1);
4200 if (len1)
4201 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4202 if (len2)
4203 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4205 /* If we don't have a constant length for the first, use the length
4206 of the second, if we know it. We don't require a constant for
4207 this case; some cost analysis could be done if both are available
4208 but neither is constant. For now, assume they're equally cheap,
4209 unless one has side effects. If both strings have constant lengths,
4210 use the smaller. */
4212 if (!len1)
4213 len = len2;
4214 else if (!len2)
4215 len = len1;
4216 else if (TREE_SIDE_EFFECTS (len1))
4217 len = len2;
4218 else if (TREE_SIDE_EFFECTS (len2))
4219 len = len1;
4220 else if (TREE_CODE (len1) != INTEGER_CST)
4221 len = len2;
4222 else if (TREE_CODE (len2) != INTEGER_CST)
4223 len = len1;
4224 else if (tree_int_cst_lt (len1, len2))
4225 len = len1;
4226 else
4227 len = len2;
4229 /* If both arguments have side effects, we cannot optimize. */
4230 if (!len || TREE_SIDE_EFFECTS (len))
4231 goto do_libcall;
4233 arg3_rtx = expand_normal (len);
4235 /* Make a place to write the result of the instruction. */
4236 result = target;
4237 if (! (result != 0
4238 && REG_P (result) && GET_MODE (result) == insn_mode
4239 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4240 result = gen_reg_rtx (insn_mode);
4242 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4243 GEN_INT (MIN (arg1_align, arg2_align)));
4245 #endif
4247 if (insn)
4249 emit_insn (insn);
4251 /* Return the value in the proper mode for this function. */
4252 mode = TYPE_MODE (TREE_TYPE (exp));
4253 if (GET_MODE (result) == mode)
4254 return result;
4255 if (target == 0)
4256 return convert_to_mode (mode, result, 0);
4257 convert_move (target, result, 0);
4258 return target;
4261 /* Expand the library call ourselves using a stabilized argument
4262 list to avoid re-evaluating the function's arguments twice. */
4263 #ifdef HAVE_cmpstrnsi
4264 do_libcall:
4265 #endif
4266 fndecl = get_callee_fndecl (exp);
4267 fn = build_call_expr (fndecl, 2, arg1, arg2);
4268 if (TREE_CODE (fn) == CALL_EXPR)
4269 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4270 return expand_call (fn, target, target == const0_rtx);
4272 #endif
4273 return NULL_RTX;
4276 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4277 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4278 the result in TARGET, if convenient. */
4280 static rtx
4281 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4283 if (!validate_arglist (exp,
4284 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4285 return NULL_RTX;
4286 else
4288 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4289 CALL_EXPR_ARG (exp, 1),
4290 CALL_EXPR_ARG (exp, 2));
4291 if (result)
4292 return expand_expr (result, target, mode, EXPAND_NORMAL);
4295 /* If c_strlen can determine an expression for one of the string
4296 lengths, and it doesn't have side effects, then emit cmpstrnsi
4297 using length MIN(strlen(string)+1, arg3). */
4298 #ifdef HAVE_cmpstrnsi
4299 if (HAVE_cmpstrnsi)
4301 tree len, len1, len2;
4302 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4303 rtx result, insn;
4304 tree fndecl, fn;
4305 tree arg1 = CALL_EXPR_ARG (exp, 0);
4306 tree arg2 = CALL_EXPR_ARG (exp, 1);
4307 tree arg3 = CALL_EXPR_ARG (exp, 2);
4309 int arg1_align
4310 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4311 int arg2_align
4312 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4313 enum machine_mode insn_mode
4314 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4316 len1 = c_strlen (arg1, 1);
4317 len2 = c_strlen (arg2, 1);
4319 if (len1)
4320 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4321 if (len2)
4322 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4324 /* If we don't have a constant length for the first, use the length
4325 of the second, if we know it. We don't require a constant for
4326 this case; some cost analysis could be done if both are available
4327 but neither is constant. For now, assume they're equally cheap,
4328 unless one has side effects. If both strings have constant lengths,
4329 use the smaller. */
4331 if (!len1)
4332 len = len2;
4333 else if (!len2)
4334 len = len1;
4335 else if (TREE_SIDE_EFFECTS (len1))
4336 len = len2;
4337 else if (TREE_SIDE_EFFECTS (len2))
4338 len = len1;
4339 else if (TREE_CODE (len1) != INTEGER_CST)
4340 len = len2;
4341 else if (TREE_CODE (len2) != INTEGER_CST)
4342 len = len1;
4343 else if (tree_int_cst_lt (len1, len2))
4344 len = len1;
4345 else
4346 len = len2;
4348 /* If both arguments have side effects, we cannot optimize. */
4349 if (!len || TREE_SIDE_EFFECTS (len))
4350 return NULL_RTX;
4352 /* The actual new length parameter is MIN(len,arg3). */
4353 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4354 fold_convert (TREE_TYPE (len), arg3));
4356 /* If we don't have POINTER_TYPE, call the function. */
4357 if (arg1_align == 0 || arg2_align == 0)
4358 return NULL_RTX;
4360 /* Make a place to write the result of the instruction. */
4361 result = target;
4362 if (! (result != 0
4363 && REG_P (result) && GET_MODE (result) == insn_mode
4364 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4365 result = gen_reg_rtx (insn_mode);
4367 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4368 arg1 = builtin_save_expr (arg1);
4369 arg2 = builtin_save_expr (arg2);
4370 len = builtin_save_expr (len);
4372 arg1_rtx = get_memory_rtx (arg1, len);
4373 arg2_rtx = get_memory_rtx (arg2, len);
4374 arg3_rtx = expand_normal (len);
4375 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4376 GEN_INT (MIN (arg1_align, arg2_align)));
4377 if (insn)
4379 emit_insn (insn);
4381 /* Return the value in the proper mode for this function. */
4382 mode = TYPE_MODE (TREE_TYPE (exp));
4383 if (GET_MODE (result) == mode)
4384 return result;
4385 if (target == 0)
4386 return convert_to_mode (mode, result, 0);
4387 convert_move (target, result, 0);
4388 return target;
4391 /* Expand the library call ourselves using a stabilized argument
4392 list to avoid re-evaluating the function's arguments twice. */
4393 fndecl = get_callee_fndecl (exp);
4394 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4395 if (TREE_CODE (fn) == CALL_EXPR)
4396 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4397 return expand_call (fn, target, target == const0_rtx);
4399 #endif
4400 return NULL_RTX;
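/* Why the MIN (strlen (s) + 1, N) bound used above is safe: strncmp
   never compares past the NUL terminator of either operand, so once the
   constant operand's length is known the count can be clamped without
   changing the result.  A sketch:  */
#include <string.h>

static int
strncmp_bound_sketch (const char *s, size_t n)
{
  size_t bound = strlen ("abc") + 1;   /* 4 for this constant operand     */
  if (n > bound)
    n = bound;
  return strncmp ("abc", s, n);        /* same result as the unclamped
                                          comparison                      */
}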
4403 /* Expand expression EXP, which is a call to the strcat builtin.
4404 Return NULL_RTX if we failed; the caller should emit a normal call,
4405 otherwise try to get the result in TARGET, if convenient. */
4407 static rtx
4408 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4410 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4411 return NULL_RTX;
4412 else
4414 tree dst = CALL_EXPR_ARG (exp, 0);
4415 tree src = CALL_EXPR_ARG (exp, 1);
4416 const char *p = c_getstr (src);
4418 /* If the string length is zero, return the dst parameter. */
4419 if (p && *p == '\0')
4420 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4422 if (!optimize_size)
4424 /* See if we can store by pieces into (dst + strlen(dst)). */
4425 tree newsrc, newdst,
4426 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4427 rtx insns;
4429 /* Stabilize the argument list. */
4430 newsrc = builtin_save_expr (src);
4431 dst = builtin_save_expr (dst);
4433 start_sequence ();
4435 /* Create strlen (dst). */
4436 newdst = build_call_expr (strlen_fn, 1, dst);
4437 /* Create (dst + (cast) strlen (dst)). */
4438 newdst = fold_convert (TREE_TYPE (dst), newdst);
4439 newdst = fold_build2 (PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4441 newdst = builtin_save_expr (newdst);
4443 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4445 end_sequence (); /* Stop sequence. */
4446 return NULL_RTX;
4449 /* Output the entire sequence. */
4450 insns = get_insns ();
4451 end_sequence ();
4452 emit_insn (insns);
4454 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4457 return NULL_RTX;
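/* A source-level sketch of the store-by-pieces path above: with a
   constant SRC and not optimizing for size, the append is rewritten as
   a copy into DST + strlen (DST):  */
#include <string.h>

static char *
strcat_sketch (char *dst)
{
  strcpy (dst + strlen (dst), "suffix");   /* what strcat (dst, "suffix")
                                              turns into                  */
  return dst;
}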
4461 /* Expand expression EXP, which is a call to the strncat builtin.
4462 Return NULL_RTX if we failed; the caller should emit a normal call,
4463 otherwise try to get the result in TARGET, if convenient. */
4465 static rtx
4466 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4468 if (validate_arglist (exp,
4469 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4471 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4472 CALL_EXPR_ARG (exp, 1),
4473 CALL_EXPR_ARG (exp, 2));
4474 if (result)
4475 return expand_expr (result, target, mode, EXPAND_NORMAL);
4477 return NULL_RTX;
4480 /* Expand expression EXP, which is a call to the strspn builtin.
4481 Return NULL_RTX if we failed; the caller should emit a normal call,
4482 otherwise try to get the result in TARGET, if convenient. */
4484 static rtx
4485 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4487 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4489 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4490 CALL_EXPR_ARG (exp, 1));
4491 if (result)
4492 return expand_expr (result, target, mode, EXPAND_NORMAL);
4494 return NULL_RTX;
4497 /* Expand expression EXP, which is a call to the strcspn builtin.
4498 Return NULL_RTX if we failed; the caller should emit a normal call,
4499 otherwise try to get the result in TARGET, if convenient. */
4501 static rtx
4502 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4504 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4506 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4507 CALL_EXPR_ARG (exp, 1));
4508 if (result)
4509 return expand_expr (result, target, mode, EXPAND_NORMAL);
4511 return NULL_RTX;
4514 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4515 if that's convenient. */
4517 rtx
4518 expand_builtin_saveregs (void)
4520 rtx val, seq;
4522 /* Don't do __builtin_saveregs more than once in a function.
4523 Save the result of the first call and reuse it. */
4524 if (saveregs_value != 0)
4525 return saveregs_value;
4527 /* When this function is called, it means that registers must be
4528 saved on entry to this function. So we migrate the call to the
4529 first insn of this function. */
4531 start_sequence ();
4533 /* Do whatever the machine needs done in this case. */
4534 val = targetm.calls.expand_builtin_saveregs ();
4536 seq = get_insns ();
4537 end_sequence ();
4539 saveregs_value = val;
4541 /* Put the insns after the NOTE that starts the function. If this
4542 is inside a start_sequence, make the outer-level insn chain current, so
4543 the code is placed at the start of the function. */
4544 push_topmost_sequence ();
4545 emit_insn_after (seq, entry_of_function ());
4546 pop_topmost_sequence ();
4548 return val;
4551 /* __builtin_args_info (N) returns word N of the arg space info
4552 for the current function. The number and meanings of words
4553 is controlled by the definition of CUMULATIVE_ARGS. */
4555 static rtx
4556 expand_builtin_args_info (tree exp)
4558 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4559 int *word_ptr = (int *) &current_function_args_info;
4561 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4563 if (call_expr_nargs (exp) != 0)
4565 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4566 error ("argument of %<__builtin_args_info%> must be constant");
4567 else
4569 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4571 if (wordnum < 0 || wordnum >= nwords)
4572 error ("argument of %<__builtin_args_info%> out of range");
4573 else
4574 return GEN_INT (word_ptr[wordnum]);
4577 else
4578 error ("missing argument in %<__builtin_args_info%>");
4580 return const0_rtx;
4583 /* Expand a call to __builtin_next_arg. */
4585 static rtx
4586 expand_builtin_next_arg (void)
4588 /* Checking arguments is already done in fold_builtin_next_arg
4589 that must be called before this function. */
4590 return expand_binop (Pmode, add_optab,
4591 current_function_internal_arg_pointer,
4592 current_function_arg_offset_rtx,
4593 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4596 /* Make it easier for the backends by protecting the valist argument
4597 from multiple evaluations. */
4599 static tree
4600 stabilize_va_list (tree valist, int needs_lvalue)
4602 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4604 if (TREE_SIDE_EFFECTS (valist))
4605 valist = save_expr (valist);
4607 /* For this case, the backends will be expecting a pointer to
4608 TREE_TYPE (va_list_type_node), but it's possible we've
4609 actually been given an array (an actual va_list_type_node).
4610 So fix it. */
4611 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4613 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4614 valist = build_fold_addr_expr_with_type (valist, p1);
4617 else
4619 tree pt;
4621 if (! needs_lvalue)
4623 if (! TREE_SIDE_EFFECTS (valist))
4624 return valist;
4626 pt = build_pointer_type (va_list_type_node);
4627 valist = fold_build1 (ADDR_EXPR, pt, valist);
4628 TREE_SIDE_EFFECTS (valist) = 1;
4631 if (TREE_SIDE_EFFECTS (valist))
4632 valist = save_expr (valist);
4633 valist = build_fold_indirect_ref (valist);
4636 return valist;
4639 /* The "standard" definition of va_list is void*. */
4641 tree
4642 std_build_builtin_va_list (void)
4644 return ptr_type_node;
4647 /* The "standard" implementation of va_start: just assign `nextarg' to
4648 the variable. */
4650 void
4651 std_expand_builtin_va_start (tree valist, rtx nextarg)
4653 tree t;
4655 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
4656 make_tree (ptr_type_node, nextarg));
4657 TREE_SIDE_EFFECTS (t) = 1;
4659 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4662 /* Expand EXP, a call to __builtin_va_start. */
4664 static rtx
4665 expand_builtin_va_start (tree exp)
4667 rtx nextarg;
4668 tree valist;
4670 if (call_expr_nargs (exp) < 2)
4672 error ("too few arguments to function %<va_start%>");
4673 return const0_rtx;
4676 if (fold_builtin_next_arg (exp, true))
4677 return const0_rtx;
4679 nextarg = expand_builtin_next_arg ();
4680 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4682 #ifdef EXPAND_BUILTIN_VA_START
4683 EXPAND_BUILTIN_VA_START (valist, nextarg);
4684 #else
4685 std_expand_builtin_va_start (valist, nextarg);
4686 #endif
4688 return const0_rtx;
4691 /* The "standard" implementation of va_arg: read the value from the
4692 current (padded) address and increment by the (padded) size. */
4694 tree
4695 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4697 tree addr, t, type_size, rounded_size, valist_tmp;
4698 unsigned HOST_WIDE_INT align, boundary;
4699 bool indirect;
4701 #ifdef ARGS_GROW_DOWNWARD
4702 /* All of the alignment and movement below is for args-grow-up machines.
4703 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4704 implement their own specialized gimplify_va_arg_expr routines. */
4705 gcc_unreachable ();
4706 #endif
4708 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4709 if (indirect)
4710 type = build_pointer_type (type);
4712 align = PARM_BOUNDARY / BITS_PER_UNIT;
4713 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4715 /* Hoist the valist value into a temporary for the moment. */
4716 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4718 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4719 requires greater alignment, we must perform dynamic alignment. */
4720 if (boundary > align
4721 && !integer_zerop (TYPE_SIZE (type)))
4723 t = fold_convert (TREE_TYPE (valist), size_int (boundary - 1));
4724 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4725 build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t));
4726 gimplify_and_add (t, pre_p);
4728 t = fold_convert (TREE_TYPE (valist), size_int (-boundary));
4729 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4730 build2 (BIT_AND_EXPR, TREE_TYPE (valist), valist_tmp, t));
4731 gimplify_and_add (t, pre_p);
4733 else
4734 boundary = align;
4736 /* If the actual alignment is less than the alignment of the type,
4737 adjust the type accordingly so that we don't assume strict alignment
4738 when dereferencing the pointer. */
4739 boundary *= BITS_PER_UNIT;
4740 if (boundary < TYPE_ALIGN (type))
4742 type = build_variant_type_copy (type);
4743 TYPE_ALIGN (type) = boundary;
4746 /* Compute the rounded size of the type. */
4747 type_size = size_in_bytes (type);
4748 rounded_size = round_up (type_size, align);
4750 /* Reduce rounded_size so it's sharable with the postqueue. */
4751 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4753 /* Get AP. */
4754 addr = valist_tmp;
4755 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4757 /* Small args are padded downward. */
4758 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4759 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4760 size_binop (MINUS_EXPR, rounded_size, type_size));
4761 t = fold_convert (TREE_TYPE (addr), t);
4762 addr = fold_build2 (PLUS_EXPR, TREE_TYPE (addr), addr, t);
4765 /* Compute new value for AP. */
4766 t = fold_convert (TREE_TYPE (valist), rounded_size);
4767 t = build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t);
4768 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4769 gimplify_and_add (t, pre_p);
4771 addr = fold_convert (build_pointer_type (type), addr);
4773 if (indirect)
4774 addr = build_va_arg_indirect_ref (addr);
4776 return build_va_arg_indirect_ref (addr);
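/* Roughly the code the standard expansion above produces when va_list is
   a plain pointer, for an argument type whose alignment does not exceed
   PARM_BOUNDARY and that is not padded downward.  A sketch only; the
   slot size below stands in for PARM_BOUNDARY / BITS_PER_UNIT, which is
   a target-specific value, and the dynamic-alignment step is omitted:  */
#include <stddef.h>

static int
std_va_arg_sketch (char **ap)
{
  const size_t slot = sizeof (long);             /* stand-in boundary     */
  char *addr = *ap;                              /* current argument slot */
  *ap = addr + ((sizeof (int) + slot - 1)
                & ~(slot - 1));                  /* advance by the
                                                    rounded-up size       */
  return *(int *) addr;                          /* fetch the value       */
}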
4779 /* Build an indirect-ref expression over the given TREE, which represents a
4780 piece of a va_arg() expansion. */
4781 tree
4782 build_va_arg_indirect_ref (tree addr)
4784 addr = build_fold_indirect_ref (addr);
4786 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4787 mf_mark (addr);
4789 return addr;
4792 /* Return a dummy expression of type TYPE in order to keep going after an
4793 error. */
4795 static tree
4796 dummy_object (tree type)
4798 tree t = build_int_cst (build_pointer_type (type), 0);
4799 return build1 (INDIRECT_REF, type, t);
4802 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4803 builtin function, but a very special sort of operator. */
4805 enum gimplify_status
4806 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4808 tree promoted_type, want_va_type, have_va_type;
4809 tree valist = TREE_OPERAND (*expr_p, 0);
4810 tree type = TREE_TYPE (*expr_p);
4811 tree t;
4813 /* Verify that valist is of the proper type. */
4814 want_va_type = va_list_type_node;
4815 have_va_type = TREE_TYPE (valist);
4817 if (have_va_type == error_mark_node)
4818 return GS_ERROR;
4820 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4822 /* If va_list is an array type, the argument may have decayed
4823 to a pointer type, e.g. by being passed to another function.
4824 In that case, unwrap both types so that we can compare the
4825 underlying records. */
4826 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4827 || POINTER_TYPE_P (have_va_type))
4829 want_va_type = TREE_TYPE (want_va_type);
4830 have_va_type = TREE_TYPE (have_va_type);
4834 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4836 error ("first argument to %<va_arg%> not of type %<va_list%>");
4837 return GS_ERROR;
4840 /* Generate a diagnostic for requesting data of a type that cannot
4841 be passed through `...' due to type promotion at the call site. */
4842 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4843 != type)
4845 static bool gave_help;
4847 /* Unfortunately, this is merely undefined, rather than a constraint
4848 violation, so we cannot make this an error. If this call is never
4849 executed, the program is still strictly conforming. */
4850 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4851 type, promoted_type);
4852 if (! gave_help)
4854 gave_help = true;
4855 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4856 promoted_type, type);
4859 /* We can, however, treat "undefined" any way we please.
4860 Call abort to encourage the user to fix the program. */
4861 inform ("if this code is reached, the program will abort");
4862 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4863 append_to_statement_list (t, pre_p);
4865 /* This is dead code, but go ahead and finish so that the
4866 mode of the result comes out right. */
4867 *expr_p = dummy_object (type);
4868 return GS_ALL_DONE;
4870 else
4872 /* Make it easier for the backends by protecting the valist argument
4873 from multiple evaluations. */
4874 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4876 /* For this case, the backends will be expecting a pointer to
4877 TREE_TYPE (va_list_type_node), but it's possible we've
4878 actually been given an array (an actual va_list_type_node).
4879 So fix it. */
4880 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4882 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4883 valist = build_fold_addr_expr_with_type (valist, p1);
4885 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4887 else
4888 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4890 if (!targetm.gimplify_va_arg_expr)
4891 /* FIXME:Once most targets are converted we should merely
4892 assert this is non-null. */
4893 return GS_ALL_DONE;
4895 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4896 return GS_OK;
4900 /* Expand EXP, a call to __builtin_va_end. */
4902 static rtx
4903 expand_builtin_va_end (tree exp)
4905 tree valist = CALL_EXPR_ARG (exp, 0);
4907 /* Evaluate for side effects, if needed. I hate macros that don't
4908 do that. */
4909 if (TREE_SIDE_EFFECTS (valist))
4910 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4912 return const0_rtx;
4915 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4916 builtin rather than just as an assignment in stdarg.h because of the
4917 nastiness of array-type va_list types. */
4919 static rtx
4920 expand_builtin_va_copy (tree exp)
4922 tree dst, src, t;
4924 dst = CALL_EXPR_ARG (exp, 0);
4925 src = CALL_EXPR_ARG (exp, 1);
4927 dst = stabilize_va_list (dst, 1);
4928 src = stabilize_va_list (src, 0);
4930 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4932 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4933 TREE_SIDE_EFFECTS (t) = 1;
4934 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4936 else
4938 rtx dstb, srcb, size;
4940 /* Evaluate to pointers. */
4941 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4942 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4943 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4944 VOIDmode, EXPAND_NORMAL);
4946 dstb = convert_memory_address (Pmode, dstb);
4947 srcb = convert_memory_address (Pmode, srcb);
4949 /* "Dereference" to BLKmode memories. */
4950 dstb = gen_rtx_MEM (BLKmode, dstb);
4951 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4952 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4953 srcb = gen_rtx_MEM (BLKmode, srcb);
4954 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4955 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4957 /* Copy. */
4958 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4961 return const0_rtx;
4964 /* Expand a call to one of the builtin functions __builtin_frame_address or
4965 __builtin_return_address. */
4967 static rtx
4968 expand_builtin_frame_address (tree fndecl, tree exp)
4970 /* The argument must be a nonnegative integer constant.
4971 It counts the number of frames to scan up the stack.
4972 The value is the return address saved in that frame. */
4973 if (call_expr_nargs (exp) == 0)
4974 /* Warning about missing arg was already issued. */
4975 return const0_rtx;
4976 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4978 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4979 error ("invalid argument to %<__builtin_frame_address%>");
4980 else
4981 error ("invalid argument to %<__builtin_return_address%>");
4982 return const0_rtx;
4984 else
4986 rtx tem
4987 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4988 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4990 /* Some ports cannot access arbitrary stack frames. */
4991 if (tem == NULL)
4993 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4994 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4995 else
4996 warning (0, "unsupported argument to %<__builtin_return_address%>");
4997 return const0_rtx;
5000 /* For __builtin_frame_address, return what we've got. */
5001 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5002 return tem;
5004 if (!REG_P (tem)
5005 && ! CONSTANT_P (tem))
5006 tem = copy_to_mode_reg (Pmode, tem);
5007 return tem;
5011 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5012 we failed and the caller should emit a normal call, otherwise try to get
5013 the result in TARGET, if convenient. */
5015 static rtx
5016 expand_builtin_alloca (tree exp, rtx target)
5018 rtx op0;
5019 rtx result;
5021 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5022 should always expand to function calls. These can be intercepted
5023 in libmudflap. */
5024 if (flag_mudflap)
5025 return NULL_RTX;
5027 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5028 return NULL_RTX;
5030 /* Compute the argument. */
5031 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5033 /* Allocate the desired space. */
5034 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5035 result = convert_memory_address (ptr_mode, result);
5037 return result;
5040 /* Expand a call to a bswap builtin with argument ARG0. MODE
5041 is the mode to expand with. */
5043 static rtx
5044 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5046 enum machine_mode mode;
5047 tree arg;
5048 rtx op0;
5050 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5051 return NULL_RTX;
5053 arg = CALL_EXPR_ARG (exp, 0);
5054 mode = TYPE_MODE (TREE_TYPE (arg));
5055 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5057 target = expand_unop (mode, bswap_optab, op0, target, 1);
5059 gcc_assert (target);
5061 return convert_to_mode (mode, target, 0);
5064 /* Expand a call to a unary builtin in EXP.
5065 Return NULL_RTX if a normal call should be emitted rather than expanding the
5066 function in-line. If convenient, the result should be placed in TARGET.
5067 SUBTARGET may be used as the target for computing one of EXP's operands. */
5069 static rtx
5070 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5071 rtx subtarget, optab op_optab)
5073 rtx op0;
5075 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5076 return NULL_RTX;
5078 /* Compute the argument. */
5079 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5080 VOIDmode, EXPAND_NORMAL);
5081 /* Compute op, into TARGET if possible.
5082 Set TARGET to wherever the result comes back. */
5083 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5084 op_optab, op0, target, 1);
5085 gcc_assert (target);
5087 return convert_to_mode (target_mode, target, 0);
5090 /* If the string passed to fputs is a constant and is one character
5091 long, we attempt to transform this call into __builtin_fputc(). */
5093 static rtx
5094 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5096 /* Verify the arguments in the original call. */
5097 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5099 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5100 CALL_EXPR_ARG (exp, 1),
5101 (target == const0_rtx),
5102 unlocked, NULL_TREE);
5103 if (result)
5104 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5106 return NULL_RTX;
5109 /* Expand a call to __builtin_expect. We just return our argument
5110 as the builtin_expect semantic should've been already executed by
5111 tree branch prediction pass. */
5113 static rtx
5114 expand_builtin_expect (tree exp, rtx target)
5116 tree arg, c;
5118 if (call_expr_nargs (exp) < 2)
5119 return const0_rtx;
5120 arg = CALL_EXPR_ARG (exp, 0);
5121 c = CALL_EXPR_ARG (exp, 1);
5123 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5124 /* When guessing was done, the hints should be already stripped away. */
5125 gcc_assert (!flag_guess_branch_prob);
5126 return target;
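/* By the time this expander runs the hint has already been consumed by
   the tree branch-prediction pass, so __builtin_expect simply evaluates
   to its first argument.  Typical use, as a sketch (GCC-specific
   builtin):  */
static int
expect_sketch (int x)
{
  if (__builtin_expect (x == 0, 0))   /* hint: this branch is unlikely;
                                         the expression's value is still
                                         just (x == 0)                    */
    return -1;
  return x;
}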
5129 void
5130 expand_builtin_trap (void)
5132 #ifdef HAVE_trap
5133 if (HAVE_trap)
5134 emit_insn (gen_trap ());
5135 else
5136 #endif
5137 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5138 emit_barrier ();
5141 /* Expand EXP, a call to fabs, fabsf or fabsl.
5142 Return NULL_RTX if a normal call should be emitted rather than expanding
5143 the function inline. If convenient, the result should be placed
5144 in TARGET. SUBTARGET may be used as the target for computing
5145 the operand. */
5147 static rtx
5148 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5150 enum machine_mode mode;
5151 tree arg;
5152 rtx op0;
5154 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5155 return NULL_RTX;
5157 arg = CALL_EXPR_ARG (exp, 0);
5158 mode = TYPE_MODE (TREE_TYPE (arg));
5159 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5160 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5163 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5164 Return NULL if a normal call should be emitted rather than expanding the
5165 function inline. If convenient, the result should be placed in TARGET.
5166 SUBTARGET may be used as the target for computing the operand. */
5168 static rtx
5169 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5171 rtx op0, op1;
5172 tree arg;
5174 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5175 return NULL_RTX;
5177 arg = CALL_EXPR_ARG (exp, 0);
5178 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5180 arg = CALL_EXPR_ARG (exp, 1);
5181 op1 = expand_normal (arg);
5183 return expand_copysign (op0, op1, target);
5186 /* Create a new constant string literal and return a char* pointer to it.
5187 The STRING_CST value is the LEN characters at STR. */
5188 tree
5189 build_string_literal (int len, const char *str)
5191 tree t, elem, index, type;
5193 t = build_string (len, str);
5194 elem = build_type_variant (char_type_node, 1, 0);
5195 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5196 type = build_array_type (elem, index);
5197 TREE_TYPE (t) = type;
5198 TREE_CONSTANT (t) = 1;
5199 TREE_INVARIANT (t) = 1;
5200 TREE_READONLY (t) = 1;
5201 TREE_STATIC (t) = 1;
5203 type = build_pointer_type (type);
5204 t = build1 (ADDR_EXPR, type, t);
5206 type = build_pointer_type (elem);
5207 t = build1 (NOP_EXPR, type, t);
5208 return t;
5211 /* Expand EXP, a call to printf or printf_unlocked.
5212 Return NULL_RTX if a normal call should be emitted rather than transforming
5213 the function inline. If convenient, the result should be placed in
5214 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5215 call. */
5216 static rtx
5217 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5218 bool unlocked)
5220 /* If we're using an unlocked function, assume the other unlocked
5221 functions exist explicitly. */
5222 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5223 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5224 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5225 : implicit_built_in_decls[BUILT_IN_PUTS];
5226 const char *fmt_str;
5227 tree fn = 0;
5228 tree fmt, arg;
5229 int nargs = call_expr_nargs (exp);
5231 /* If the return value is used, don't do the transformation. */
5232 if (target != const0_rtx)
5233 return NULL_RTX;
5235 /* Verify the required arguments in the original call. */
5236 if (nargs == 0)
5237 return NULL_RTX;
5238 fmt = CALL_EXPR_ARG (exp, 0);
5239 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5240 return NULL_RTX;
5242 /* Check whether the format is a literal string constant. */
5243 fmt_str = c_getstr (fmt);
5244 if (fmt_str == NULL)
5245 return NULL_RTX;
5247 if (!init_target_chars ())
5248 return NULL_RTX;
5250 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5251 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5253 if ((nargs != 2)
5254 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5255 return NULL_RTX;
5256 if (fn_puts)
5257 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5259 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5260 else if (strcmp (fmt_str, target_percent_c) == 0)
5262 if ((nargs != 2)
5263 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5264 return NULL_RTX;
5265 if (fn_putchar)
5266 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5268 else
5270 /* We can't handle anything else with % args or %% ... yet. */
5271 if (strchr (fmt_str, target_percent))
5272 return NULL_RTX;
5274 if (nargs > 1)
5275 return NULL_RTX;
5277 /* If the format specifier was "", printf does nothing. */
5278 if (fmt_str[0] == '\0')
5279 return const0_rtx;
5280 /* If the format specifier has length of 1, call putchar. */
5281 if (fmt_str[1] == '\0')
5283 /* Given printf("c"), where c is any one character,
5284 convert "c"[0] to an int and pass that to the replacement
5285 function. */
5286 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5287 if (fn_putchar)
5288 fn = build_call_expr (fn_putchar, 1, arg);
5290 else
5292 /* If the format specifier was "string\n", call puts("string"). */
5293 size_t len = strlen (fmt_str);
5294 if ((unsigned char)fmt_str[len - 1] == target_newline)
5296 /* Create a NUL-terminated string that's one char shorter
5297 than the original, stripping off the trailing '\n'. */
5298 char *newstr = alloca (len);
5299 memcpy (newstr, fmt_str, len - 1);
5300 newstr[len - 1] = 0;
5301 arg = build_string_literal (len, newstr);
5302 if (fn_puts)
5303 fn = build_call_expr (fn_puts, 1, arg);
5305 else
5306 /* We'd like to arrange to call fputs(string,stdout) here,
5307 but we need stdout and don't have a way to get it yet. */
5308 return NULL_RTX;
5312 if (!fn)
5313 return NULL_RTX;
5314 if (TREE_CODE (fn) == CALL_EXPR)
5315 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5316 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5319 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5320 Return NULL_RTX if a normal call should be emitted rather than transforming
5321 the function inline. If convenient, the result should be placed in
5322 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5323 call. */
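/* The transformations below mirror the printf case, again only when the
   call's value is unused (fp, s and c are placeholders):

     fprintf (fp, "%s", s);    becomes  fputs (s, fp);
     fprintf (fp, "%c", c);    becomes  fputc (c, fp);
     fprintf (fp, "hello");    becomes  fputs ("hello", fp);
     fprintf (fp, "");         only evaluates FP for its side effects.  */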
5324 static rtx
5325 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5326 bool unlocked)
5328 /* If we're using an unlocked function, assume the other unlocked
5329 functions exist explicitly. */
5330 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5331 : implicit_built_in_decls[BUILT_IN_FPUTC];
5332 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5333 : implicit_built_in_decls[BUILT_IN_FPUTS];
5334 const char *fmt_str;
5335 tree fn = 0;
5336 tree fmt, fp, arg;
5337 int nargs = call_expr_nargs (exp);
5339 /* If the return value is used, don't do the transformation. */
5340 if (target != const0_rtx)
5341 return NULL_RTX;
5343 /* Verify the required arguments in the original call. */
5344 if (nargs < 2)
5345 return NULL_RTX;
5346 fp = CALL_EXPR_ARG (exp, 0);
5347 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5348 return NULL_RTX;
5349 fmt = CALL_EXPR_ARG (exp, 1);
5350 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5351 return NULL_RTX;
5353 /* Check whether the format is a literal string constant. */
5354 fmt_str = c_getstr (fmt);
5355 if (fmt_str == NULL)
5356 return NULL_RTX;
5358 if (!init_target_chars ())
5359 return NULL_RTX;
5361 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5362 if (strcmp (fmt_str, target_percent_s) == 0)
5364 if ((nargs != 3)
5365 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5366 return NULL_RTX;
5367 arg = CALL_EXPR_ARG (exp, 2);
5368 if (fn_fputs)
5369 fn = build_call_expr (fn_fputs, 2, arg, fp);
5371 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5372 else if (strcmp (fmt_str, target_percent_c) == 0)
5374 if ((nargs != 3)
5375 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5376 return NULL_RTX;
5377 arg = CALL_EXPR_ARG (exp, 2);
5378 if (fn_fputc)
5379 fn = build_call_expr (fn_fputc, 2, arg, fp);
5381 else
5383 /* We can't handle anything else with % args or %% ... yet. */
5384 if (strchr (fmt_str, target_percent))
5385 return NULL_RTX;
5387 if (nargs > 2)
5388 return NULL_RTX;
5390 /* If the format specifier was "", fprintf does nothing. */
5391 if (fmt_str[0] == '\0')
5393 /* Evaluate and ignore FILE* argument for side-effects. */
5394 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5395 return const0_rtx;
5398 /* When "string" doesn't contain %, replace all cases of
5399 fprintf(stream,string) with fputs(string,stream). The fputs
5400 builtin will take care of special cases like length == 1. */
5401 if (fn_fputs)
5402 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5405 if (!fn)
5406 return NULL_RTX;
5407 if (TREE_CODE (fn) == CALL_EXPR)
5408 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5409 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5412 /* Expand a call EXP to sprintf. Return NULL_RTX if
5413 a normal call should be emitted rather than expanding the function
5414 inline. If convenient, the result should be placed in TARGET with
5415 mode MODE. */
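/* Sketch of the two cases handled below (buf and s are placeholders):

     sprintf (buf, "hello");   becomes  strcpy (buf, "hello"), result 5;
     sprintf (buf, "%s", s);   becomes  strcpy (buf, s), where the result,
                               if it is used, must be the compile-time
                               constant length of S.  */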
5417 static rtx
5418 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5420 tree dest, fmt;
5421 const char *fmt_str;
5422 int nargs = call_expr_nargs (exp);
5424 /* Verify the required arguments in the original call. */
5425 if (nargs < 2)
5426 return NULL_RTX;
5427 dest = CALL_EXPR_ARG (exp, 0);
5428 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5429 return NULL_RTX;
5430 fmt = CALL_EXPR_ARG (exp, 1);
5431 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5432 return NULL_RTX;
5434 /* Check whether the format is a literal string constant. */
5435 fmt_str = c_getstr (fmt);
5436 if (fmt_str == NULL)
5437 return NULL_RTX;
5439 if (!init_target_chars ())
5440 return NULL_RTX;
5442 /* If the format doesn't contain % args or %%, use strcpy. */
5443 if (strchr (fmt_str, target_percent) == 0)
5445 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5446 tree exp;
5448 if ((nargs > 2) || ! fn)
5449 return NULL_RTX;
5450 expand_expr (build_call_expr (fn, 2, dest, fmt),
5451 const0_rtx, VOIDmode, EXPAND_NORMAL);
5452 if (target == const0_rtx)
5453 return const0_rtx;
5454 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5455 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5457 /* If the format is "%s", use strcpy; if the result is used, the length of the argument must be a known constant. */
5458 else if (strcmp (fmt_str, target_percent_s) == 0)
5460 tree fn, arg, len;
5461 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5463 if (! fn)
5464 return NULL_RTX;
5465 if (nargs != 3)
5466 return NULL_RTX;
5467 arg = CALL_EXPR_ARG (exp, 2);
5468 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5469 return NULL_RTX;
5471 if (target != const0_rtx)
5473 len = c_strlen (arg, 1);
5474 if (! len || TREE_CODE (len) != INTEGER_CST)
5475 return NULL_RTX;
5477 else
5478 len = NULL_TREE;
5480 expand_expr (build_call_expr (fn, 2, dest, arg),
5481 const0_rtx, VOIDmode, EXPAND_NORMAL);
5483 if (target == const0_rtx)
5484 return const0_rtx;
5485 return expand_expr (len, target, mode, EXPAND_NORMAL);
5488 return NULL_RTX;
5491 /* Expand a call to either the entry or exit function profiler. */
5493 static rtx
5494 expand_builtin_profile_func (bool exitp)
5496 rtx this, which;
5498 this = DECL_RTL (current_function_decl);
5499 gcc_assert (MEM_P (this));
5500 this = XEXP (this, 0);
5502 if (exitp)
5503 which = profile_function_exit_libfunc;
5504 else
5505 which = profile_function_entry_libfunc;
5507 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5508 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5509 0),
5510 Pmode);
5512 return const0_rtx;
5515 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
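/* The rounding below computes, in Pmode arithmetic,
     tramp = (tramp + align - 1) & -align
   with align = TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT.  For example, with
   a 16-byte alignment requirement an address of 0x1004 becomes 0x1010.  */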
5517 static rtx
5518 round_trampoline_addr (rtx tramp)
5520 rtx temp, addend, mask;
5522 /* If we don't need too much alignment, we'll have been guaranteed
5523 proper alignment by get_trampoline_type. */
5524 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5525 return tramp;
5527 /* Round address up to desired boundary. */
5528 temp = gen_reg_rtx (Pmode);
5529 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5530 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5532 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5533 temp, 0, OPTAB_LIB_WIDEN);
5534 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5535 temp, 0, OPTAB_LIB_WIDEN);
5537 return tramp;
5540 static rtx
5541 expand_builtin_init_trampoline (tree exp)
5543 tree t_tramp, t_func, t_chain;
5544 rtx r_tramp, r_func, r_chain;
5545 #ifdef TRAMPOLINE_TEMPLATE
5546 rtx blktramp;
5547 #endif
5549 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5550 POINTER_TYPE, VOID_TYPE))
5551 return NULL_RTX;
5553 t_tramp = CALL_EXPR_ARG (exp, 0);
5554 t_func = CALL_EXPR_ARG (exp, 1);
5555 t_chain = CALL_EXPR_ARG (exp, 2);
5557 r_tramp = expand_normal (t_tramp);
5558 r_func = expand_normal (t_func);
5559 r_chain = expand_normal (t_chain);
5561 /* Generate insns to initialize the trampoline. */
5562 r_tramp = round_trampoline_addr (r_tramp);
5563 #ifdef TRAMPOLINE_TEMPLATE
5564 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5565 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5566 emit_block_move (blktramp, assemble_trampoline_template (),
5567 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5568 #endif
5569 trampolines_created = 1;
5570 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5572 return const0_rtx;
5575 static rtx
5576 expand_builtin_adjust_trampoline (tree exp)
5578 rtx tramp;
5580 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5581 return NULL_RTX;
5583 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5584 tramp = round_trampoline_addr (tramp);
5585 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5586 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5587 #endif
5589 return tramp;
5592 /* Expand a call to the built-in signbit, signbitf, signbitl, signbitd32,
5593 signbitd64, or signbitd128 function.
5594 Return NULL_RTX if a normal call should be emitted rather than expanding
5595 the function in-line. EXP is the expression that is a call to the builtin
5596 function; if convenient, the result should be placed in TARGET. */
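/* Conceptually, the expansion extracts the sign bit from the integer
   image of the argument.  For IEEE single precision, where the sign is
   bit 31, this is essentially either
     bits & 0x80000000
   when bit 31 fits in the result mode, or
     (bits >> 31) & 1
   otherwise; wider formats use the word that holds the sign bit.  */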
5598 static rtx
5599 expand_builtin_signbit (tree exp, rtx target)
5601 const struct real_format *fmt;
5602 enum machine_mode fmode, imode, rmode;
5603 HOST_WIDE_INT hi, lo;
5604 tree arg;
5605 int word, bitpos;
5606 rtx temp;
5608 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5609 return NULL_RTX;
5611 arg = CALL_EXPR_ARG (exp, 0);
5612 fmode = TYPE_MODE (TREE_TYPE (arg));
5613 rmode = TYPE_MODE (TREE_TYPE (exp));
5614 fmt = REAL_MODE_FORMAT (fmode);
5616 /* For floating point formats without a sign bit, implement signbit
5617 as "ARG < 0.0". */
5618 bitpos = fmt->signbit_ro;
5619 if (bitpos < 0)
5621 /* But we can't do this if the format supports signed zero. */
5622 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5623 return NULL_RTX;
5625 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5626 build_real (TREE_TYPE (arg), dconst0));
5627 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5630 temp = expand_normal (arg);
5631 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5633 imode = int_mode_for_mode (fmode);
5634 if (imode == BLKmode)
5635 return NULL_RTX;
5636 temp = gen_lowpart (imode, temp);
5638 else
5640 imode = word_mode;
5641 /* Handle targets with different FP word orders. */
5642 if (FLOAT_WORDS_BIG_ENDIAN)
5643 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5644 else
5645 word = bitpos / BITS_PER_WORD;
5646 temp = operand_subword_force (temp, word, fmode);
5647 bitpos = bitpos % BITS_PER_WORD;
5650 /* Force the intermediate word_mode (or narrower) result into a
5651 register. This avoids attempting to create paradoxical SUBREGs
5652 of floating point modes below. */
5653 temp = force_reg (imode, temp);
5655 /* If the bitpos is within the "result mode" lowpart, the operation
5656 can be implemented with a single bitwise AND. Otherwise, we need
5657 a right shift and an AND. */
5659 if (bitpos < GET_MODE_BITSIZE (rmode))
5661 if (bitpos < HOST_BITS_PER_WIDE_INT)
5663 hi = 0;
5664 lo = (HOST_WIDE_INT) 1 << bitpos;
5666 else
5668 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5669 lo = 0;
5672 if (imode != rmode)
5673 temp = gen_lowpart (rmode, temp);
5674 temp = expand_binop (rmode, and_optab, temp,
5675 immed_double_const (lo, hi, rmode),
5676 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5678 else
5680 /* Perform a logical right shift to place the signbit in the least
5681 significant bit, then truncate the result to the desired mode
5682 and mask just this bit. */
5683 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5684 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5685 temp = gen_lowpart (rmode, temp);
5686 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5687 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5690 return temp;
5693 /* Expand fork or exec calls. TARGET is the desired target of the
5694 call. EXP is the call. FN is the FUNCTION_DECL of the
5695 actual function. IGNORE is nonzero if the
5696 value is to be ignored. */
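/* For instance, with -fprofile-arcs a call to fork () is redirected to
   __gcov_fork (), and execl (...) to __gcov_execl (...); the libgcov
   wrappers are expected to keep the profile counters consistent across
   the fork or exec before invoking the real function.  */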
5698 static rtx
5699 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5701 tree id, decl;
5702 tree call;
5704 /* If we are not profiling, just call the function. */
5705 if (!profile_arc_flag)
5706 return NULL_RTX;
5708 /* Otherwise call the wrapper. This should be equivalent for the rest of
5709 the compiler, so the code does not diverge, and the wrapper may run the
5710 code necessary for keeping the profiling sane. */
5712 switch (DECL_FUNCTION_CODE (fn))
5714 case BUILT_IN_FORK:
5715 id = get_identifier ("__gcov_fork");
5716 break;
5718 case BUILT_IN_EXECL:
5719 id = get_identifier ("__gcov_execl");
5720 break;
5722 case BUILT_IN_EXECV:
5723 id = get_identifier ("__gcov_execv");
5724 break;
5726 case BUILT_IN_EXECLP:
5727 id = get_identifier ("__gcov_execlp");
5728 break;
5730 case BUILT_IN_EXECLE:
5731 id = get_identifier ("__gcov_execle");
5732 break;
5734 case BUILT_IN_EXECVP:
5735 id = get_identifier ("__gcov_execvp");
5736 break;
5738 case BUILT_IN_EXECVE:
5739 id = get_identifier ("__gcov_execve");
5740 break;
5742 default:
5743 gcc_unreachable ();
5746 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5747 DECL_EXTERNAL (decl) = 1;
5748 TREE_PUBLIC (decl) = 1;
5749 DECL_ARTIFICIAL (decl) = 1;
5750 TREE_NOTHROW (decl) = 1;
5751 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5752 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5753 call = rewrite_call_expr (exp, 0, decl, 0);
5754 return expand_call (call, target, ignore);
5759 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5760 the pointer in these functions is void*, the tree optimizers may remove
5761 casts. The mode computed in expand_builtin isn't reliable either, due
5762 to __sync_bool_compare_and_swap.
5764 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5765 group of builtins. This gives us log2 of the mode size. */
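/* For example, the 4-byte variant of a group (FOO_4) has
   FCODE_DIFF == 2, so the mode returned holds
   BITS_PER_UNIT << 2 == 32 bits on typical targets.  */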
5767 static inline enum machine_mode
5768 get_builtin_sync_mode (int fcode_diff)
5770 /* The size is not negotiable, so ask not to get BLKmode in return
5771 if the target indicates that a smaller size would be better. */
5772 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5775 /* Expand the memory expression LOC and return the appropriate memory operand
5776 for the builtin_sync operations. */
5778 static rtx
5779 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5781 rtx addr, mem;
5783 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5785 /* Note that we explicitly do not want any alias information for this
5786 memory, so that we kill all other live memories. Otherwise we don't
5787 satisfy the full barrier semantics of the intrinsic. */
5788 mem = validize_mem (gen_rtx_MEM (mode, addr));
5790 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5791 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5792 MEM_VOLATILE_P (mem) = 1;
5794 return mem;
5797 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5798 EXP is the CALL_EXPR. CODE is the rtx code
5799 that corresponds to the arithmetic or logical operation from the name;
5800 an exception here is that NOT actually means NAND. TARGET is an optional
5801 place for us to store the results; AFTER is true if this is the
5802 fetch_and_xxx form. IGNORE is true if we don't actually care about
5803 the result of the operation at all. */
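/* For instance, __sync_fetch_and_add (p, v) returns the value *p held
   before the atomic addition (AFTER is false), while
   __sync_add_and_fetch (p, v) returns the updated value (AFTER is true).
   When IGNORE is set only the atomic update itself is emitted.  */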
5805 static rtx
5806 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5807 enum rtx_code code, bool after,
5808 rtx target, bool ignore)
5810 rtx val, mem;
5811 enum machine_mode old_mode;
5813 /* Expand the operands. */
5814 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5816 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5817 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5818 of CONST_INTs, where we know the old_mode only from the call argument. */
5819 old_mode = GET_MODE (val);
5820 if (old_mode == VOIDmode)
5821 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5822 val = convert_modes (mode, old_mode, val, 1);
5824 if (ignore)
5825 return expand_sync_operation (mem, val, code);
5826 else
5827 return expand_sync_fetch_operation (mem, val, code, after, target);
5830 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5831 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5832 true if this is the boolean form. TARGET is a place for us to store the
5833 results; this is NOT optional if IS_BOOL is true. */
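/* In other words, __sync_val_compare_and_swap (p, o, n) atomically does
     tmp = *p; if (tmp == o) *p = n;
   and returns TMP, while __sync_bool_compare_and_swap returns nonzero
   iff the store was performed.  */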
5835 static rtx
5836 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5837 bool is_bool, rtx target)
5839 rtx old_val, new_val, mem;
5840 enum machine_mode old_mode;
5842 /* Expand the operands. */
5843 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5846 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5847 mode, EXPAND_NORMAL);
5848 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5849 of CONST_INTs, where we know the old_mode only from the call argument. */
5850 old_mode = GET_MODE (old_val);
5851 if (old_mode == VOIDmode)
5852 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5853 old_val = convert_modes (mode, old_mode, old_val, 1);
5855 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5856 mode, EXPAND_NORMAL);
5857 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5858 of CONST_INTs, where we know the old_mode only from the call argument. */
5859 old_mode = GET_MODE (new_val);
5860 if (old_mode == VOIDmode)
5861 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5862 new_val = convert_modes (mode, old_mode, new_val, 1);
5864 if (is_bool)
5865 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5866 else
5867 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5870 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5871 general form is actually an atomic exchange, and some targets only
5872 support a reduced form with the second argument being a constant 1.
5873 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5874 the results. */
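/* For example, __sync_lock_test_and_set (p, 1) atomically stores 1 into
   *p and returns the previous contents of *p; per the documented
   semantics this acts as an acquire barrier rather than a full one.  */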
5876 static rtx
5877 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5878 rtx target)
5880 rtx val, mem;
5881 enum machine_mode old_mode;
5883 /* Expand the operands. */
5884 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5885 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5886 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5887 of CONST_INTs, where we know the old_mode only from the call argument. */
5888 old_mode = GET_MODE (val);
5889 if (old_mode == VOIDmode)
5890 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5891 val = convert_modes (mode, old_mode, val, 1);
5893 return expand_sync_lock_test_and_set (mem, val, target);
5896 /* Expand the __sync_synchronize intrinsic. */
5898 static void
5899 expand_builtin_synchronize (void)
5901 tree x;
5903 #ifdef HAVE_memory_barrier
5904 if (HAVE_memory_barrier)
5906 emit_insn (gen_memory_barrier ());
5907 return;
5909 #endif
5911 /* If no explicit memory barrier instruction is available, create an
5912 empty asm stmt with a memory clobber. */
5913 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
5914 tree_cons (NULL, build_string (6, "memory"), NULL));
5915 ASM_VOLATILE_P (x) = 1;
5916 expand_asm_expr (x);
5919 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
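/* __sync_lock_release (p) releases a lock taken with
   __sync_lock_test_and_set by storing 0 into *p; when the target has no
   sync_lock_release pattern the code below falls back to a full barrier
   followed by an ordinary store of zero.  */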
5921 static void
5922 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5924 enum insn_code icode;
5925 rtx mem, insn;
5926 rtx val = const0_rtx;
5928 /* Expand the operands. */
5929 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5931 /* If there is an explicit operation in the md file, use it. */
5932 icode = sync_lock_release[mode];
5933 if (icode != CODE_FOR_nothing)
5935 if (!insn_data[icode].operand[1].predicate (val, mode))
5936 val = force_reg (mode, val);
5938 insn = GEN_FCN (icode) (mem, val);
5939 if (insn)
5941 emit_insn (insn);
5942 return;
5946 /* Otherwise we can implement this operation by emitting a barrier
5947 followed by a store of zero. */
5948 expand_builtin_synchronize ();
5949 emit_move_insn (mem, val);
5952 /* Expand an expression EXP that calls a built-in function,
5953 with result going to TARGET if that's convenient
5954 (and in mode MODE if that's convenient).
5955 SUBTARGET may be used as the target for computing one of EXP's operands.
5956 IGNORE is nonzero if the value is to be ignored. */
5958 rtx
5959 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5960 int ignore)
5962 tree fndecl = get_callee_fndecl (exp);
5963 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5964 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5966 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5967 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5969 /* When not optimizing, generate calls to library functions for a certain
5970 set of builtins. */
5971 if (!optimize
5972 && !called_as_built_in (fndecl)
5973 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5974 && fcode != BUILT_IN_ALLOCA)
5975 return expand_call (exp, target, ignore);
5977 /* The built-in function expanders test for target == const0_rtx
5978 to determine whether the function's result will be ignored. */
5979 if (ignore)
5980 target = const0_rtx;
5982 /* If the result of a pure or const built-in function is ignored, and
5983 none of its arguments are volatile, we can avoid expanding the
5984 built-in call and just evaluate the arguments for side-effects. */
5985 if (target == const0_rtx
5986 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
5988 bool volatilep = false;
5989 tree arg;
5990 call_expr_arg_iterator iter;
5992 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5993 if (TREE_THIS_VOLATILE (arg))
5995 volatilep = true;
5996 break;
5999 if (! volatilep)
6001 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6002 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6003 return const0_rtx;
6007 switch (fcode)
6009 CASE_FLT_FN (BUILT_IN_FABS):
6010 target = expand_builtin_fabs (exp, target, subtarget);
6011 if (target)
6012 return target;
6013 break;
6015 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6016 target = expand_builtin_copysign (exp, target, subtarget);
6017 if (target)
6018 return target;
6019 break;
6021 /* Just do a normal library call if we were unable to fold
6022 the values. */
6023 CASE_FLT_FN (BUILT_IN_CABS):
6024 break;
6026 CASE_FLT_FN (BUILT_IN_EXP):
6027 CASE_FLT_FN (BUILT_IN_EXP10):
6028 CASE_FLT_FN (BUILT_IN_POW10):
6029 CASE_FLT_FN (BUILT_IN_EXP2):
6030 CASE_FLT_FN (BUILT_IN_EXPM1):
6031 CASE_FLT_FN (BUILT_IN_LOGB):
6032 CASE_FLT_FN (BUILT_IN_LOG):
6033 CASE_FLT_FN (BUILT_IN_LOG10):
6034 CASE_FLT_FN (BUILT_IN_LOG2):
6035 CASE_FLT_FN (BUILT_IN_LOG1P):
6036 CASE_FLT_FN (BUILT_IN_TAN):
6037 CASE_FLT_FN (BUILT_IN_ASIN):
6038 CASE_FLT_FN (BUILT_IN_ACOS):
6039 CASE_FLT_FN (BUILT_IN_ATAN):
6040 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6041 because of possible accuracy problems. */
6042 if (! flag_unsafe_math_optimizations)
6043 break;
6044 CASE_FLT_FN (BUILT_IN_SQRT):
6045 CASE_FLT_FN (BUILT_IN_FLOOR):
6046 CASE_FLT_FN (BUILT_IN_CEIL):
6047 CASE_FLT_FN (BUILT_IN_TRUNC):
6048 CASE_FLT_FN (BUILT_IN_ROUND):
6049 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6050 CASE_FLT_FN (BUILT_IN_RINT):
6051 target = expand_builtin_mathfn (exp, target, subtarget);
6052 if (target)
6053 return target;
6054 break;
6056 CASE_FLT_FN (BUILT_IN_ILOGB):
6057 if (! flag_unsafe_math_optimizations)
6058 break;
6059 CASE_FLT_FN (BUILT_IN_ISINF):
6060 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6061 if (target)
6062 return target;
6063 break;
6065 CASE_FLT_FN (BUILT_IN_LCEIL):
6066 CASE_FLT_FN (BUILT_IN_LLCEIL):
6067 CASE_FLT_FN (BUILT_IN_LFLOOR):
6068 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6069 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6070 if (target)
6071 return target;
6072 break;
6074 CASE_FLT_FN (BUILT_IN_LRINT):
6075 CASE_FLT_FN (BUILT_IN_LLRINT):
6076 CASE_FLT_FN (BUILT_IN_LROUND):
6077 CASE_FLT_FN (BUILT_IN_LLROUND):
6078 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6079 if (target)
6080 return target;
6081 break;
6083 CASE_FLT_FN (BUILT_IN_POW):
6084 target = expand_builtin_pow (exp, target, subtarget);
6085 if (target)
6086 return target;
6087 break;
6089 CASE_FLT_FN (BUILT_IN_POWI):
6090 target = expand_builtin_powi (exp, target, subtarget);
6091 if (target)
6092 return target;
6093 break;
6095 CASE_FLT_FN (BUILT_IN_ATAN2):
6096 CASE_FLT_FN (BUILT_IN_LDEXP):
6097 CASE_FLT_FN (BUILT_IN_SCALB):
6098 CASE_FLT_FN (BUILT_IN_SCALBN):
6099 CASE_FLT_FN (BUILT_IN_SCALBLN):
6100 if (! flag_unsafe_math_optimizations)
6101 break;
6103 CASE_FLT_FN (BUILT_IN_FMOD):
6104 CASE_FLT_FN (BUILT_IN_REMAINDER):
6105 CASE_FLT_FN (BUILT_IN_DREM):
6106 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6107 if (target)
6108 return target;
6109 break;
6111 CASE_FLT_FN (BUILT_IN_CEXPI):
6112 target = expand_builtin_cexpi (exp, target, subtarget);
6113 gcc_assert (target);
6114 return target;
6116 CASE_FLT_FN (BUILT_IN_SIN):
6117 CASE_FLT_FN (BUILT_IN_COS):
6118 if (! flag_unsafe_math_optimizations)
6119 break;
6120 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6121 if (target)
6122 return target;
6123 break;
6125 CASE_FLT_FN (BUILT_IN_SINCOS):
6126 if (! flag_unsafe_math_optimizations)
6127 break;
6128 target = expand_builtin_sincos (exp);
6129 if (target)
6130 return target;
6131 break;
6133 case BUILT_IN_APPLY_ARGS:
6134 return expand_builtin_apply_args ();
6136 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6137 FUNCTION with a copy of the parameters described by
6138 ARGUMENTS, and ARGSIZE. It returns a block of memory
6139 allocated on the stack into which are stored all the registers
6140 that might possibly be used for returning the result of a
6141 function. ARGUMENTS is the value returned by
6142 __builtin_apply_args. ARGSIZE is the number of bytes of
6143 arguments that must be copied. ??? How should this value be
6144 computed? We'll also need a safe worst case value for varargs
6145 functions. */
6146 case BUILT_IN_APPLY:
6147 if (!validate_arglist (exp, POINTER_TYPE,
6148 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6149 && !validate_arglist (exp, REFERENCE_TYPE,
6150 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6151 return const0_rtx;
6152 else
6154 rtx ops[3];
6156 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6157 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6158 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6160 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6163 /* __builtin_return (RESULT) causes the function to return the
6164 value described by RESULT. RESULT is address of the block of
6165 memory returned by __builtin_apply. */
6166 case BUILT_IN_RETURN:
6167 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6168 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6169 return const0_rtx;
6171 case BUILT_IN_SAVEREGS:
6172 return expand_builtin_saveregs ();
6174 case BUILT_IN_ARGS_INFO:
6175 return expand_builtin_args_info (exp);
6177 /* Return the address of the first anonymous stack arg. */
6178 case BUILT_IN_NEXT_ARG:
6179 if (fold_builtin_next_arg (exp, false))
6180 return const0_rtx;
6181 return expand_builtin_next_arg ();
6183 case BUILT_IN_CLASSIFY_TYPE:
6184 return expand_builtin_classify_type (exp);
6186 case BUILT_IN_CONSTANT_P:
6187 return const0_rtx;
6189 case BUILT_IN_FRAME_ADDRESS:
6190 case BUILT_IN_RETURN_ADDRESS:
6191 return expand_builtin_frame_address (fndecl, exp);
6193 /* Returns the address of the area where the structure is returned.
6194 0 otherwise. */
6195 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6196 if (call_expr_nargs (exp) != 0
6197 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6198 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6199 return const0_rtx;
6200 else
6201 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6203 case BUILT_IN_ALLOCA:
6204 target = expand_builtin_alloca (exp, target);
6205 if (target)
6206 return target;
6207 break;
6209 case BUILT_IN_STACK_SAVE:
6210 return expand_stack_save ();
6212 case BUILT_IN_STACK_RESTORE:
6213 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6214 return const0_rtx;
6216 case BUILT_IN_BSWAP32:
6217 case BUILT_IN_BSWAP64:
6218 target = expand_builtin_bswap (exp, target, subtarget);
6220 if (target)
6221 return target;
6222 break;
6224 CASE_INT_FN (BUILT_IN_FFS):
6225 case BUILT_IN_FFSIMAX:
6226 target = expand_builtin_unop (target_mode, exp, target,
6227 subtarget, ffs_optab);
6228 if (target)
6229 return target;
6230 break;
6232 CASE_INT_FN (BUILT_IN_CLZ):
6233 case BUILT_IN_CLZIMAX:
6234 target = expand_builtin_unop (target_mode, exp, target,
6235 subtarget, clz_optab);
6236 if (target)
6237 return target;
6238 break;
6240 CASE_INT_FN (BUILT_IN_CTZ):
6241 case BUILT_IN_CTZIMAX:
6242 target = expand_builtin_unop (target_mode, exp, target,
6243 subtarget, ctz_optab);
6244 if (target)
6245 return target;
6246 break;
6248 CASE_INT_FN (BUILT_IN_POPCOUNT):
6249 case BUILT_IN_POPCOUNTIMAX:
6250 target = expand_builtin_unop (target_mode, exp, target,
6251 subtarget, popcount_optab);
6252 if (target)
6253 return target;
6254 break;
6256 CASE_INT_FN (BUILT_IN_PARITY):
6257 case BUILT_IN_PARITYIMAX:
6258 target = expand_builtin_unop (target_mode, exp, target,
6259 subtarget, parity_optab);
6260 if (target)
6261 return target;
6262 break;
6264 case BUILT_IN_STRLEN:
6265 target = expand_builtin_strlen (exp, target, target_mode);
6266 if (target)
6267 return target;
6268 break;
6270 case BUILT_IN_STRCPY:
6271 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6272 if (target)
6273 return target;
6274 break;
6276 case BUILT_IN_STRNCPY:
6277 target = expand_builtin_strncpy (exp, target, mode);
6278 if (target)
6279 return target;
6280 break;
6282 case BUILT_IN_STPCPY:
6283 target = expand_builtin_stpcpy (exp, target, mode);
6284 if (target)
6285 return target;
6286 break;
6288 case BUILT_IN_STRCAT:
6289 target = expand_builtin_strcat (fndecl, exp, target, mode);
6290 if (target)
6291 return target;
6292 break;
6294 case BUILT_IN_STRNCAT:
6295 target = expand_builtin_strncat (exp, target, mode);
6296 if (target)
6297 return target;
6298 break;
6300 case BUILT_IN_STRSPN:
6301 target = expand_builtin_strspn (exp, target, mode);
6302 if (target)
6303 return target;
6304 break;
6306 case BUILT_IN_STRCSPN:
6307 target = expand_builtin_strcspn (exp, target, mode);
6308 if (target)
6309 return target;
6310 break;
6312 case BUILT_IN_STRSTR:
6313 target = expand_builtin_strstr (exp, target, mode);
6314 if (target)
6315 return target;
6316 break;
6318 case BUILT_IN_STRPBRK:
6319 target = expand_builtin_strpbrk (exp, target, mode);
6320 if (target)
6321 return target;
6322 break;
6324 case BUILT_IN_INDEX:
6325 case BUILT_IN_STRCHR:
6326 target = expand_builtin_strchr (exp, target, mode);
6327 if (target)
6328 return target;
6329 break;
6331 case BUILT_IN_RINDEX:
6332 case BUILT_IN_STRRCHR:
6333 target = expand_builtin_strrchr (exp, target, mode);
6334 if (target)
6335 return target;
6336 break;
6338 case BUILT_IN_MEMCPY:
6339 target = expand_builtin_memcpy (exp, target, mode);
6340 if (target)
6341 return target;
6342 break;
6344 case BUILT_IN_MEMPCPY:
6345 target = expand_builtin_mempcpy (exp, target, mode);
6346 if (target)
6347 return target;
6348 break;
6350 case BUILT_IN_MEMMOVE:
6351 target = expand_builtin_memmove (exp, target, mode, ignore);
6352 if (target)
6353 return target;
6354 break;
6356 case BUILT_IN_BCOPY:
6357 target = expand_builtin_bcopy (exp, ignore);
6358 if (target)
6359 return target;
6360 break;
6362 case BUILT_IN_MEMSET:
6363 target = expand_builtin_memset (exp, target, mode);
6364 if (target)
6365 return target;
6366 break;
6368 case BUILT_IN_BZERO:
6369 target = expand_builtin_bzero (exp);
6370 if (target)
6371 return target;
6372 break;
6374 case BUILT_IN_STRCMP:
6375 target = expand_builtin_strcmp (exp, target, mode);
6376 if (target)
6377 return target;
6378 break;
6380 case BUILT_IN_STRNCMP:
6381 target = expand_builtin_strncmp (exp, target, mode);
6382 if (target)
6383 return target;
6384 break;
6386 case BUILT_IN_MEMCHR:
6387 target = expand_builtin_memchr (exp, target, mode);
6388 if (target)
6389 return target;
6390 break;
6392 case BUILT_IN_BCMP:
6393 case BUILT_IN_MEMCMP:
6394 target = expand_builtin_memcmp (exp, target, mode);
6395 if (target)
6396 return target;
6397 break;
6399 case BUILT_IN_SETJMP:
6400 /* This should have been lowered to the builtins below. */
6401 gcc_unreachable ();
6403 case BUILT_IN_SETJMP_SETUP:
6404 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6405 and the receiver label. */
6406 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6408 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6409 VOIDmode, EXPAND_NORMAL);
6410 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6411 rtx label_r = label_rtx (label);
6413 /* This is copied from the handling of non-local gotos. */
6414 expand_builtin_setjmp_setup (buf_addr, label_r);
6415 nonlocal_goto_handler_labels
6416 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6417 nonlocal_goto_handler_labels);
6418 /* ??? Do not let expand_label treat us as such since we would
6419 not want to be both on the list of non-local labels and on
6420 the list of forced labels. */
6421 FORCED_LABEL (label) = 0;
6422 return const0_rtx;
6424 break;
6426 case BUILT_IN_SETJMP_DISPATCHER:
6427 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6428 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6430 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6431 rtx label_r = label_rtx (label);
6433 /* Remove the dispatcher label from the list of non-local labels
6434 since the receiver labels have been added to it above. */
6435 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6436 return const0_rtx;
6438 break;
6440 case BUILT_IN_SETJMP_RECEIVER:
6441 /* __builtin_setjmp_receiver is passed the receiver label. */
6442 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6444 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6445 rtx label_r = label_rtx (label);
6447 expand_builtin_setjmp_receiver (label_r);
6448 return const0_rtx;
6450 break;
6452 /* __builtin_longjmp is passed a pointer to an array of five words.
6453 It's similar to the C library longjmp function but works with
6454 __builtin_setjmp above. */
6455 case BUILT_IN_LONGJMP:
6456 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6458 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6459 VOIDmode, EXPAND_NORMAL);
6460 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6462 if (value != const1_rtx)
6464 error ("%<__builtin_longjmp%> second argument must be 1");
6465 return const0_rtx;
6468 expand_builtin_longjmp (buf_addr, value);
6469 return const0_rtx;
6471 break;
6473 case BUILT_IN_NONLOCAL_GOTO:
6474 target = expand_builtin_nonlocal_goto (exp);
6475 if (target)
6476 return target;
6477 break;
6479 /* This updates the setjmp buffer that is its argument with the value
6480 of the current stack pointer. */
6481 case BUILT_IN_UPDATE_SETJMP_BUF:
6482 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6484 rtx buf_addr
6485 = expand_normal (CALL_EXPR_ARG (exp, 0));
6487 expand_builtin_update_setjmp_buf (buf_addr);
6488 return const0_rtx;
6490 break;
6492 case BUILT_IN_TRAP:
6493 expand_builtin_trap ();
6494 return const0_rtx;
6496 case BUILT_IN_PRINTF:
6497 target = expand_builtin_printf (exp, target, mode, false);
6498 if (target)
6499 return target;
6500 break;
6502 case BUILT_IN_PRINTF_UNLOCKED:
6503 target = expand_builtin_printf (exp, target, mode, true);
6504 if (target)
6505 return target;
6506 break;
6508 case BUILT_IN_FPUTS:
6509 target = expand_builtin_fputs (exp, target, false);
6510 if (target)
6511 return target;
6512 break;
6513 case BUILT_IN_FPUTS_UNLOCKED:
6514 target = expand_builtin_fputs (exp, target, true);
6515 if (target)
6516 return target;
6517 break;
6519 case BUILT_IN_FPRINTF:
6520 target = expand_builtin_fprintf (exp, target, mode, false);
6521 if (target)
6522 return target;
6523 break;
6525 case BUILT_IN_FPRINTF_UNLOCKED:
6526 target = expand_builtin_fprintf (exp, target, mode, true);
6527 if (target)
6528 return target;
6529 break;
6531 case BUILT_IN_SPRINTF:
6532 target = expand_builtin_sprintf (exp, target, mode);
6533 if (target)
6534 return target;
6535 break;
6537 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6538 case BUILT_IN_SIGNBITD32:
6539 case BUILT_IN_SIGNBITD64:
6540 case BUILT_IN_SIGNBITD128:
6541 target = expand_builtin_signbit (exp, target);
6542 if (target)
6543 return target;
6544 break;
6546 /* Various hooks for the DWARF 2 __throw routine. */
6547 case BUILT_IN_UNWIND_INIT:
6548 expand_builtin_unwind_init ();
6549 return const0_rtx;
6550 case BUILT_IN_DWARF_CFA:
6551 return virtual_cfa_rtx;
6552 #ifdef DWARF2_UNWIND_INFO
6553 case BUILT_IN_DWARF_SP_COLUMN:
6554 return expand_builtin_dwarf_sp_column ();
6555 case BUILT_IN_INIT_DWARF_REG_SIZES:
6556 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6557 return const0_rtx;
6558 #endif
6559 case BUILT_IN_FROB_RETURN_ADDR:
6560 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6561 case BUILT_IN_EXTRACT_RETURN_ADDR:
6562 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6563 case BUILT_IN_EH_RETURN:
6564 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6565 CALL_EXPR_ARG (exp, 1));
6566 return const0_rtx;
6567 #ifdef EH_RETURN_DATA_REGNO
6568 case BUILT_IN_EH_RETURN_DATA_REGNO:
6569 return expand_builtin_eh_return_data_regno (exp);
6570 #endif
6571 case BUILT_IN_EXTEND_POINTER:
6572 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6574 case BUILT_IN_VA_START:
6575 case BUILT_IN_STDARG_START:
6576 return expand_builtin_va_start (exp);
6577 case BUILT_IN_VA_END:
6578 return expand_builtin_va_end (exp);
6579 case BUILT_IN_VA_COPY:
6580 return expand_builtin_va_copy (exp);
6581 case BUILT_IN_EXPECT:
6582 return expand_builtin_expect (exp, target);
6583 case BUILT_IN_PREFETCH:
6584 expand_builtin_prefetch (exp);
6585 return const0_rtx;
6587 case BUILT_IN_PROFILE_FUNC_ENTER:
6588 return expand_builtin_profile_func (false);
6589 case BUILT_IN_PROFILE_FUNC_EXIT:
6590 return expand_builtin_profile_func (true);
6592 case BUILT_IN_INIT_TRAMPOLINE:
6593 return expand_builtin_init_trampoline (exp);
6594 case BUILT_IN_ADJUST_TRAMPOLINE:
6595 return expand_builtin_adjust_trampoline (exp);
6597 case BUILT_IN_FORK:
6598 case BUILT_IN_EXECL:
6599 case BUILT_IN_EXECV:
6600 case BUILT_IN_EXECLP:
6601 case BUILT_IN_EXECLE:
6602 case BUILT_IN_EXECVP:
6603 case BUILT_IN_EXECVE:
6604 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6605 if (target)
6606 return target;
6607 break;
6609 case BUILT_IN_FETCH_AND_ADD_1:
6610 case BUILT_IN_FETCH_AND_ADD_2:
6611 case BUILT_IN_FETCH_AND_ADD_4:
6612 case BUILT_IN_FETCH_AND_ADD_8:
6613 case BUILT_IN_FETCH_AND_ADD_16:
6614 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6615 target = expand_builtin_sync_operation (mode, exp, PLUS,
6616 false, target, ignore);
6617 if (target)
6618 return target;
6619 break;
6621 case BUILT_IN_FETCH_AND_SUB_1:
6622 case BUILT_IN_FETCH_AND_SUB_2:
6623 case BUILT_IN_FETCH_AND_SUB_4:
6624 case BUILT_IN_FETCH_AND_SUB_8:
6625 case BUILT_IN_FETCH_AND_SUB_16:
6626 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6627 target = expand_builtin_sync_operation (mode, exp, MINUS,
6628 false, target, ignore);
6629 if (target)
6630 return target;
6631 break;
6633 case BUILT_IN_FETCH_AND_OR_1:
6634 case BUILT_IN_FETCH_AND_OR_2:
6635 case BUILT_IN_FETCH_AND_OR_4:
6636 case BUILT_IN_FETCH_AND_OR_8:
6637 case BUILT_IN_FETCH_AND_OR_16:
6638 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6639 target = expand_builtin_sync_operation (mode, exp, IOR,
6640 false, target, ignore);
6641 if (target)
6642 return target;
6643 break;
6645 case BUILT_IN_FETCH_AND_AND_1:
6646 case BUILT_IN_FETCH_AND_AND_2:
6647 case BUILT_IN_FETCH_AND_AND_4:
6648 case BUILT_IN_FETCH_AND_AND_8:
6649 case BUILT_IN_FETCH_AND_AND_16:
6650 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6651 target = expand_builtin_sync_operation (mode, exp, AND,
6652 false, target, ignore);
6653 if (target)
6654 return target;
6655 break;
6657 case BUILT_IN_FETCH_AND_XOR_1:
6658 case BUILT_IN_FETCH_AND_XOR_2:
6659 case BUILT_IN_FETCH_AND_XOR_4:
6660 case BUILT_IN_FETCH_AND_XOR_8:
6661 case BUILT_IN_FETCH_AND_XOR_16:
6662 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6663 target = expand_builtin_sync_operation (mode, exp, XOR,
6664 false, target, ignore);
6665 if (target)
6666 return target;
6667 break;
6669 case BUILT_IN_FETCH_AND_NAND_1:
6670 case BUILT_IN_FETCH_AND_NAND_2:
6671 case BUILT_IN_FETCH_AND_NAND_4:
6672 case BUILT_IN_FETCH_AND_NAND_8:
6673 case BUILT_IN_FETCH_AND_NAND_16:
6674 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6675 target = expand_builtin_sync_operation (mode, exp, NOT,
6676 false, target, ignore);
6677 if (target)
6678 return target;
6679 break;
6681 case BUILT_IN_ADD_AND_FETCH_1:
6682 case BUILT_IN_ADD_AND_FETCH_2:
6683 case BUILT_IN_ADD_AND_FETCH_4:
6684 case BUILT_IN_ADD_AND_FETCH_8:
6685 case BUILT_IN_ADD_AND_FETCH_16:
6686 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6687 target = expand_builtin_sync_operation (mode, exp, PLUS,
6688 true, target, ignore);
6689 if (target)
6690 return target;
6691 break;
6693 case BUILT_IN_SUB_AND_FETCH_1:
6694 case BUILT_IN_SUB_AND_FETCH_2:
6695 case BUILT_IN_SUB_AND_FETCH_4:
6696 case BUILT_IN_SUB_AND_FETCH_8:
6697 case BUILT_IN_SUB_AND_FETCH_16:
6698 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6699 target = expand_builtin_sync_operation (mode, exp, MINUS,
6700 true, target, ignore);
6701 if (target)
6702 return target;
6703 break;
6705 case BUILT_IN_OR_AND_FETCH_1:
6706 case BUILT_IN_OR_AND_FETCH_2:
6707 case BUILT_IN_OR_AND_FETCH_4:
6708 case BUILT_IN_OR_AND_FETCH_8:
6709 case BUILT_IN_OR_AND_FETCH_16:
6710 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6711 target = expand_builtin_sync_operation (mode, exp, IOR,
6712 true, target, ignore);
6713 if (target)
6714 return target;
6715 break;
6717 case BUILT_IN_AND_AND_FETCH_1:
6718 case BUILT_IN_AND_AND_FETCH_2:
6719 case BUILT_IN_AND_AND_FETCH_4:
6720 case BUILT_IN_AND_AND_FETCH_8:
6721 case BUILT_IN_AND_AND_FETCH_16:
6722 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6723 target = expand_builtin_sync_operation (mode, exp, AND,
6724 true, target, ignore);
6725 if (target)
6726 return target;
6727 break;
6729 case BUILT_IN_XOR_AND_FETCH_1:
6730 case BUILT_IN_XOR_AND_FETCH_2:
6731 case BUILT_IN_XOR_AND_FETCH_4:
6732 case BUILT_IN_XOR_AND_FETCH_8:
6733 case BUILT_IN_XOR_AND_FETCH_16:
6734 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6735 target = expand_builtin_sync_operation (mode, exp, XOR,
6736 true, target, ignore);
6737 if (target)
6738 return target;
6739 break;
6741 case BUILT_IN_NAND_AND_FETCH_1:
6742 case BUILT_IN_NAND_AND_FETCH_2:
6743 case BUILT_IN_NAND_AND_FETCH_4:
6744 case BUILT_IN_NAND_AND_FETCH_8:
6745 case BUILT_IN_NAND_AND_FETCH_16:
6746 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6747 target = expand_builtin_sync_operation (mode, exp, NOT,
6748 true, target, ignore);
6749 if (target)
6750 return target;
6751 break;
6753 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6754 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6755 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6756 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6757 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6758 if (mode == VOIDmode)
6759 mode = TYPE_MODE (boolean_type_node);
6760 if (!target || !register_operand (target, mode))
6761 target = gen_reg_rtx (mode);
6763 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6764 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6765 if (target)
6766 return target;
6767 break;
6769 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6770 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6771 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6772 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6773 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6774 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6775 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6776 if (target)
6777 return target;
6778 break;
6780 case BUILT_IN_LOCK_TEST_AND_SET_1:
6781 case BUILT_IN_LOCK_TEST_AND_SET_2:
6782 case BUILT_IN_LOCK_TEST_AND_SET_4:
6783 case BUILT_IN_LOCK_TEST_AND_SET_8:
6784 case BUILT_IN_LOCK_TEST_AND_SET_16:
6785 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6786 target = expand_builtin_lock_test_and_set (mode, exp, target);
6787 if (target)
6788 return target;
6789 break;
6791 case BUILT_IN_LOCK_RELEASE_1:
6792 case BUILT_IN_LOCK_RELEASE_2:
6793 case BUILT_IN_LOCK_RELEASE_4:
6794 case BUILT_IN_LOCK_RELEASE_8:
6795 case BUILT_IN_LOCK_RELEASE_16:
6796 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6797 expand_builtin_lock_release (mode, exp);
6798 return const0_rtx;
6800 case BUILT_IN_SYNCHRONIZE:
6801 expand_builtin_synchronize ();
6802 return const0_rtx;
6804 case BUILT_IN_OBJECT_SIZE:
6805 return expand_builtin_object_size (exp);
6807 case BUILT_IN_MEMCPY_CHK:
6808 case BUILT_IN_MEMPCPY_CHK:
6809 case BUILT_IN_MEMMOVE_CHK:
6810 case BUILT_IN_MEMSET_CHK:
6811 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6812 if (target)
6813 return target;
6814 break;
6816 case BUILT_IN_STRCPY_CHK:
6817 case BUILT_IN_STPCPY_CHK:
6818 case BUILT_IN_STRNCPY_CHK:
6819 case BUILT_IN_STRCAT_CHK:
6820 case BUILT_IN_STRNCAT_CHK:
6821 case BUILT_IN_SNPRINTF_CHK:
6822 case BUILT_IN_VSNPRINTF_CHK:
6823 maybe_emit_chk_warning (exp, fcode);
6824 break;
6826 case BUILT_IN_SPRINTF_CHK:
6827 case BUILT_IN_VSPRINTF_CHK:
6828 maybe_emit_sprintf_chk_warning (exp, fcode);
6829 break;
6831 default: /* Just emit a normal library call for an unknown builtin. */
6832 break;
6835 /* The switch statement above can drop through to cause the function
6836 to be called normally. */
6837 return expand_call (exp, target, ignore);
6840 /* Determine whether a tree node represents a call to a built-in
6841 function. If the tree T is a call to a built-in function with
6842 the right number of arguments of the appropriate types, return
6843 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6844 Otherwise the return value is END_BUILTINS. */
6846 enum built_in_function
6847 builtin_mathfn_code (tree t)
6849 tree fndecl, arg, parmlist;
6850 tree argtype, parmtype;
6851 call_expr_arg_iterator iter;
6853 if (TREE_CODE (t) != CALL_EXPR
6854 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6855 return END_BUILTINS;
6857 fndecl = get_callee_fndecl (t);
6858 if (fndecl == NULL_TREE
6859 || TREE_CODE (fndecl) != FUNCTION_DECL
6860 || ! DECL_BUILT_IN (fndecl)
6861 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6862 return END_BUILTINS;
6864 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6865 init_call_expr_arg_iterator (t, &iter);
6866 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6868 /* If a function doesn't take a variable number of arguments,
6869 the last element in the list will have type `void'. */
6870 parmtype = TREE_VALUE (parmlist);
6871 if (VOID_TYPE_P (parmtype))
6873 if (more_call_expr_args_p (&iter))
6874 return END_BUILTINS;
6875 return DECL_FUNCTION_CODE (fndecl);
6878 if (! more_call_expr_args_p (&iter))
6879 return END_BUILTINS;
6881 arg = next_call_expr_arg (&iter);
6882 argtype = TREE_TYPE (arg);
6884 if (SCALAR_FLOAT_TYPE_P (parmtype))
6886 if (! SCALAR_FLOAT_TYPE_P (argtype))
6887 return END_BUILTINS;
6889 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6891 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6892 return END_BUILTINS;
6894 else if (POINTER_TYPE_P (parmtype))
6896 if (! POINTER_TYPE_P (argtype))
6897 return END_BUILTINS;
6899 else if (INTEGRAL_TYPE_P (parmtype))
6901 if (! INTEGRAL_TYPE_P (argtype))
6902 return END_BUILTINS;
6904 else
6905 return END_BUILTINS;
6908 /* Variable-length argument list. */
6909 return DECL_FUNCTION_CODE (fndecl);
6912 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6913 evaluate to a constant. */
6915 static tree
6916 fold_builtin_constant_p (tree arg)
6918 /* We return 1 for a numeric type that's known to be a constant
6919 value at compile-time or for an aggregate type that's a
6920 literal constant. */
6921 STRIP_NOPS (arg);
6923 /* If we know this is a constant, return the constant one. */
6924 if (CONSTANT_CLASS_P (arg)
6925 || (TREE_CODE (arg) == CONSTRUCTOR
6926 && TREE_CONSTANT (arg)))
6927 return integer_one_node;
6928 if (TREE_CODE (arg) == ADDR_EXPR)
6930 tree op = TREE_OPERAND (arg, 0);
6931 if (TREE_CODE (op) == STRING_CST
6932 || (TREE_CODE (op) == ARRAY_REF
6933 && integer_zerop (TREE_OPERAND (op, 1))
6934 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6935 return integer_one_node;
6938 /* If this expression has side effects, show we don't know it to be a
6939 constant. Likewise if it's a pointer or aggregate type since in
6940 those cases we only want literals, since those are only optimized
6941 when generating RTL, not later.
6942 And finally, if we are compiling an initializer, not code, we
6943 need to return a definite result now; there's not going to be any
6944 more optimization done. */
6945 if (TREE_SIDE_EFFECTS (arg)
6946 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6947 || POINTER_TYPE_P (TREE_TYPE (arg))
6948 || cfun == 0
6949 || folding_initializer)
6950 return integer_zero_node;
6952 return NULL_TREE;
6955 /* Fold a call to __builtin_expect with argument ARG, if we expect that a
6956 comparison against the argument will fold to a constant. In practice,
6957 this means a true constant or the address of a non-weak symbol. */
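/* For example, __builtin_expect (x, 1) with X a literal constant folds
   to X itself, since the hint adds nothing once the value is known; the
   address of a weak symbol is deliberately not treated this way because
   it may still compare equal to zero at link time.  */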
6959 static tree
6960 fold_builtin_expect (tree arg)
6962 tree inner;
6964 /* If the argument isn't invariant, then there's nothing we can do. */
6965 if (!TREE_INVARIANT (arg))
6966 return NULL_TREE;
6968 /* If we're looking at an address of a weak decl, then do not fold. */
6969 inner = arg;
6970 STRIP_NOPS (inner);
6971 if (TREE_CODE (inner) == ADDR_EXPR)
6975 inner = TREE_OPERAND (inner, 0);
6977 while (TREE_CODE (inner) == COMPONENT_REF
6978 || TREE_CODE (inner) == ARRAY_REF);
6979 if (DECL_P (inner) && DECL_WEAK (inner))
6980 return NULL_TREE;
6983 /* Otherwise, ARG already has the proper type for the return value. */
6984 return arg;
6987 /* Fold a call to __builtin_classify_type with argument ARG. */
6989 static tree
6990 fold_builtin_classify_type (tree arg)
6992 if (arg == 0)
6993 return build_int_cst (NULL_TREE, no_type_class);
6995 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6998 /* Fold a call to __builtin_strlen with argument ARG. */
7000 static tree
7001 fold_builtin_strlen (tree arg)
7003 if (!validate_arg (arg, POINTER_TYPE))
7004 return NULL_TREE;
7005 else
7007 tree len = c_strlen (arg, 0);
7009 if (len)
7011 /* Convert from the internal "sizetype" type to "size_t". */
7012 if (size_type_node)
7013 len = fold_convert (size_type_node, len);
7014 return len;
7017 return NULL_TREE;
7021 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7023 static tree
7024 fold_builtin_inf (tree type, int warn)
7026 REAL_VALUE_TYPE real;
7028 /* __builtin_inff is intended to be usable to define INFINITY on all
7029 targets. If an infinity is not available, INFINITY expands "to a
7030 positive constant of type float that overflows at translation
7031 time", footnote "In this case, using INFINITY will violate the
7032 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7033 Thus we pedwarn to ensure this constraint violation is
7034 diagnosed. */
7035 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7036 pedwarn ("target format does not support infinity");
7038 real_inf (&real);
7039 return build_real (type, real);
7042 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7044 static tree
7045 fold_builtin_nan (tree arg, tree type, int quiet)
7047 REAL_VALUE_TYPE real;
7048 const char *str;
7050 if (!validate_arg (arg, POINTER_TYPE))
7051 return NULL_TREE;
7052 str = c_getstr (arg);
7053 if (!str)
7054 return NULL_TREE;
7056 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7057 return NULL_TREE;
7059 return build_real (type, real);
7062 /* Return true if the floating point expression T has an integer value.
7063 We also allow +Inf, -Inf and NaN to be considered integer values. */
7065 static bool
7066 integer_valued_real_p (tree t)
7068 switch (TREE_CODE (t))
7070 case FLOAT_EXPR:
7071 return true;
7073 case ABS_EXPR:
7074 case SAVE_EXPR:
7075 case NON_LVALUE_EXPR:
7076 return integer_valued_real_p (TREE_OPERAND (t, 0));
7078 case COMPOUND_EXPR:
7079 case MODIFY_EXPR:
7080 case BIND_EXPR:
7081 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7083 case PLUS_EXPR:
7084 case MINUS_EXPR:
7085 case MULT_EXPR:
7086 case MIN_EXPR:
7087 case MAX_EXPR:
7088 return integer_valued_real_p (TREE_OPERAND (t, 0))
7089 && integer_valued_real_p (TREE_OPERAND (t, 1));
7091 case COND_EXPR:
7092 return integer_valued_real_p (TREE_OPERAND (t, 1))
7093 && integer_valued_real_p (TREE_OPERAND (t, 2));
7095 case REAL_CST:
7096 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7098 case NOP_EXPR:
7100 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7101 if (TREE_CODE (type) == INTEGER_TYPE)
7102 return true;
7103 if (TREE_CODE (type) == REAL_TYPE)
7104 return integer_valued_real_p (TREE_OPERAND (t, 0));
7105 break;
7108 case CALL_EXPR:
7109 switch (builtin_mathfn_code (t))
7111 CASE_FLT_FN (BUILT_IN_CEIL):
7112 CASE_FLT_FN (BUILT_IN_FLOOR):
7113 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7114 CASE_FLT_FN (BUILT_IN_RINT):
7115 CASE_FLT_FN (BUILT_IN_ROUND):
7116 CASE_FLT_FN (BUILT_IN_TRUNC):
7117 return true;
7119 CASE_FLT_FN (BUILT_IN_FMIN):
7120 CASE_FLT_FN (BUILT_IN_FMAX):
7121 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7122 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7124 default:
7125 break;
7127 break;
7129 default:
7130 break;
7132 return false;
7135 /* FNDECL is assumed to be a builtin where truncation can be propagated
7136 across (for instance floor((double)f) == (double)floorf (f)).
7137 Do the transformation for a call with argument ARG. */
7139 static tree
7140 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7142 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7144 if (!validate_arg (arg, REAL_TYPE))
7145 return NULL_TREE;
7147 /* Integer rounding functions are idempotent. */
7148 if (fcode == builtin_mathfn_code (arg))
7149 return arg;
7151 /* If the argument is already integer valued, and we don't need to worry
7152 about setting errno, there's no need to perform rounding. */
7153 if (! flag_errno_math && integer_valued_real_p (arg))
7154 return arg;
7156 if (optimize)
7158 tree arg0 = strip_float_extensions (arg);
7159 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7160 tree newtype = TREE_TYPE (arg0);
7161 tree decl;
7163 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7164 && (decl = mathfn_built_in (newtype, fcode)))
7165 return fold_convert (ftype,
7166 build_call_expr (decl, 1,
7167 fold_convert (newtype, arg0)));
7169 return NULL_TREE;
7172 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7173 the argument, for instance lround((double)f) -> lroundf (f).
7174 Do the transformation for a call with argument ARG. */
7176 static tree
7177 fold_fixed_mathfn (tree fndecl, tree arg)
7179 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7181 if (!validate_arg (arg, REAL_TYPE))
7182 return NULL_TREE;
7184 /* If the argument is already integer valued, and we don't need to worry
7185 about setting errno, there's no need to perform rounding. */
7186 if (! flag_errno_math && integer_valued_real_p (arg))
7187 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7189 if (optimize)
7191 tree ftype = TREE_TYPE (arg);
7192 tree arg0 = strip_float_extensions (arg);
7193 tree newtype = TREE_TYPE (arg0);
7194 tree decl;
7196 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7197 && (decl = mathfn_built_in (newtype, fcode)))
7198 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7201 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7202 sizeof (long long) == sizeof (long). */
7203 if (TYPE_PRECISION (long_long_integer_type_node)
7204 == TYPE_PRECISION (long_integer_type_node))
7206 tree newfn = NULL_TREE;
7207 switch (fcode)
7209 CASE_FLT_FN (BUILT_IN_LLCEIL):
7210 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7211 break;
7213 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7214 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7215 break;
7217 CASE_FLT_FN (BUILT_IN_LLROUND):
7218 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7219 break;
7221 CASE_FLT_FN (BUILT_IN_LLRINT):
7222 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7223 break;
7225 default:
7226 break;
7229 if (newfn)
7231 tree newcall = build_call_expr(newfn, 1, arg);
7232 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7236 return NULL_TREE;
7239 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7240 return type. Return NULL_TREE if no simplification can be made. */
7242 static tree
7243 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7245 tree res;
7247 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7248 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7249 return NULL_TREE;
7251 /* Calculate the result when the argument is a constant. */
7252 if (TREE_CODE (arg) == COMPLEX_CST
7253 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7254 type, mpfr_hypot)))
7255 return res;
7257 if (TREE_CODE (arg) == COMPLEX_EXPR)
7259 tree real = TREE_OPERAND (arg, 0);
7260 tree imag = TREE_OPERAND (arg, 1);
7262 /* If either part is zero, cabs is fabs of the other. */
7263 if (real_zerop (real))
7264 return fold_build1 (ABS_EXPR, type, imag);
7265 if (real_zerop (imag))
7266 return fold_build1 (ABS_EXPR, type, real);
7268 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7269 if (flag_unsafe_math_optimizations
7270 && operand_equal_p (real, imag, OEP_PURE_SAME))
7272 const REAL_VALUE_TYPE sqrt2_trunc
7273 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
7274 STRIP_NOPS (real);
7275 return fold_build2 (MULT_EXPR, type,
7276 fold_build1 (ABS_EXPR, type, real),
7277 build_real (type, sqrt2_trunc));
7281 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7282 if (TREE_CODE (arg) == NEGATE_EXPR
7283 || TREE_CODE (arg) == CONJ_EXPR)
7284 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7286 /* Don't do this when optimizing for size. */
7287 if (flag_unsafe_math_optimizations
7288 && optimize && !optimize_size)
7290 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7292 if (sqrtfn != NULL_TREE)
7294 tree rpart, ipart, result;
7296 arg = builtin_save_expr (arg);
7298 rpart = fold_build1 (REALPART_EXPR, type, arg);
7299 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7301 rpart = builtin_save_expr (rpart);
7302 ipart = builtin_save_expr (ipart);
7304 result = fold_build2 (PLUS_EXPR, type,
7305 fold_build2 (MULT_EXPR, type,
7306 rpart, rpart),
7307 fold_build2 (MULT_EXPR, type,
7308 ipart, ipart));
7310 return build_call_expr (sqrtfn, 1, result);
7314 return NULL_TREE;
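/* For illustration, with x, y and z standing for arbitrary operands:
   cabs (x + 0.0 * I) folds to fabs (x), cabs (x + x * I) folds to
   fabs (x) * sqrt (2.0) under -funsafe-math-optimizations, and when
   additionally optimizing (but not for size) a generic cabs (z) is
   expanded to roughly

       sqrt (__real__ z * __real__ z + __imag__ z * __imag__ z)

   using the sqrt builtin of the same type.  */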
7317 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7318 Return NULL_TREE if no simplification can be made. */
7320 static tree
7321 fold_builtin_sqrt (tree arg, tree type)
7324 enum built_in_function fcode;
7325 tree res;
7327 if (!validate_arg (arg, REAL_TYPE))
7328 return NULL_TREE;
7330 /* Calculate the result when the argument is a constant. */
7331 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7332 return res;
7334 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7335 fcode = builtin_mathfn_code (arg);
7336 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7338 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7339 arg = fold_build2 (MULT_EXPR, type,
7340 CALL_EXPR_ARG (arg, 0),
7341 build_real (type, dconsthalf));
7342 return build_call_expr (expfn, 1, arg);
7345 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7346 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7348 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7350 if (powfn)
7352 tree arg0 = CALL_EXPR_ARG (arg, 0);
7353 tree tree_root;
7354 /* The inner root was either sqrt or cbrt. */
7355 REAL_VALUE_TYPE dconstroot =
7356 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;
7358 /* Adjust for the outer root. */
7359 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7360 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7361 tree_root = build_real (type, dconstroot);
7362 return build_call_expr (powfn, 2, arg0, tree_root);
7366 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7367 if (flag_unsafe_math_optimizations
7368 && (fcode == BUILT_IN_POW
7369 || fcode == BUILT_IN_POWF
7370 || fcode == BUILT_IN_POWL))
7372 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7373 tree arg0 = CALL_EXPR_ARG (arg, 0);
7374 tree arg1 = CALL_EXPR_ARG (arg, 1);
7375 tree narg1;
7376 if (!tree_expr_nonnegative_p (arg0))
7377 arg0 = build1 (ABS_EXPR, type, arg0);
7378 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7379 build_real (type, dconsthalf));
7380 return build_call_expr (powfn, 2, arg0, narg1);
7383 return NULL_TREE;
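/* For illustration, under -funsafe-math-optimizations the folds above
   rewrite (x and y are arbitrary real operands):

       sqrt (exp (x))     ->  exp (x * 0.5)
       sqrt (cbrt (x))    ->  pow (x, 1.0/6.0)
       sqrt (pow (x, y))  ->  pow (fabs (x), y * 0.5)  */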
7386 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7387 Return NULL_TREE if no simplification can be made. */
7389 static tree
7390 fold_builtin_cbrt (tree arg, tree type)
7392 const enum built_in_function fcode = builtin_mathfn_code (arg);
7393 tree res;
7395 if (!validate_arg (arg, REAL_TYPE))
7396 return NULL_TREE;
7398 /* Calculate the result when the argument is a constant. */
7399 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7400 return res;
7402 if (flag_unsafe_math_optimizations)
7404 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7405 if (BUILTIN_EXPONENT_P (fcode))
7407 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7408 const REAL_VALUE_TYPE third_trunc =
7409 real_value_truncate (TYPE_MODE (type), dconstthird);
7410 arg = fold_build2 (MULT_EXPR, type,
7411 CALL_EXPR_ARG (arg, 0),
7412 build_real (type, third_trunc));
7413 return build_call_expr (expfn, 1, arg);
7416 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7417 if (BUILTIN_SQRT_P (fcode))
7419 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7421 if (powfn)
7423 tree arg0 = CALL_EXPR_ARG (arg, 0);
7424 tree tree_root;
7425 REAL_VALUE_TYPE dconstroot = dconstthird;
7427 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7428 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7429 tree_root = build_real (type, dconstroot);
7430 return build_call_expr (powfn, 2, arg0, tree_root);
7434 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7435 if (BUILTIN_CBRT_P (fcode))
7437 tree arg0 = CALL_EXPR_ARG (arg, 0);
7438 if (tree_expr_nonnegative_p (arg0))
7440 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7442 if (powfn)
7444 tree tree_root;
7445 REAL_VALUE_TYPE dconstroot;
7447 real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
7448 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7449 tree_root = build_real (type, dconstroot);
7450 return build_call_expr (powfn, 2, arg0, tree_root);
7455 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7456 if (fcode == BUILT_IN_POW
7457 || fcode == BUILT_IN_POWF
7458 || fcode == BUILT_IN_POWL)
7460 tree arg00 = CALL_EXPR_ARG (arg, 0);
7461 tree arg01 = CALL_EXPR_ARG (arg, 1);
7462 if (tree_expr_nonnegative_p (arg00))
7464 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7465 const REAL_VALUE_TYPE dconstroot
7466 = real_value_truncate (TYPE_MODE (type), dconstthird);
7467 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7468 build_real (type, dconstroot));
7469 return build_call_expr (powfn, 2, arg00, narg01);
7473 return NULL_TREE;
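/* For illustration, under -funsafe-math-optimizations the folds above
   rewrite (x and y are arbitrary real operands):

       cbrt (exp (x))     ->  exp (x / 3.0)
       cbrt (sqrt (x))    ->  pow (x, 1.0/6.0)
       cbrt (cbrt (x))    ->  pow (x, 1.0/9.0)   if x is known nonnegative
       cbrt (pow (x, y))  ->  pow (x, y / 3.0)   if x is known nonnegative  */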
7476 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7477 TYPE is the type of the return value. Return NULL_TREE if no
7478 simplification can be made. */
7480 static tree
7481 fold_builtin_cos (tree arg, tree type, tree fndecl)
7483 tree res, narg;
7485 if (!validate_arg (arg, REAL_TYPE))
7486 return NULL_TREE;
7488 /* Calculate the result when the argument is a constant. */
7489 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7490 return res;
7492 /* Optimize cos(-x) into cos (x). */
7493 if ((narg = fold_strip_sign_ops (arg)))
7494 return build_call_expr (fndecl, 1, narg);
7496 return NULL_TREE;
7499 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7500 Return NULL_TREE if no simplification can be made. */
7502 static tree
7503 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7505 if (validate_arg (arg, REAL_TYPE))
7507 tree res, narg;
7509 /* Calculate the result when the argument is a constant. */
7510 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7511 return res;
7513 /* Optimize cosh(-x) into cosh (x). */
7514 if ((narg = fold_strip_sign_ops (arg)))
7515 return build_call_expr (fndecl, 1, narg);
7518 return NULL_TREE;
7521 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7522 Return NULL_TREE if no simplification can be made. */
7524 static tree
7525 fold_builtin_tan (tree arg, tree type)
7527 enum built_in_function fcode;
7528 tree res;
7530 if (!validate_arg (arg, REAL_TYPE))
7531 return NULL_TREE;
7533 /* Calculate the result when the argument is a constant. */
7534 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7535 return res;
7537 /* Optimize tan(atan(x)) = x. */
7538 fcode = builtin_mathfn_code (arg);
7539 if (flag_unsafe_math_optimizations
7540 && (fcode == BUILT_IN_ATAN
7541 || fcode == BUILT_IN_ATANF
7542 || fcode == BUILT_IN_ATANL))
7543 return CALL_EXPR_ARG (arg, 0);
7545 return NULL_TREE;
7548 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7549 NULL_TREE if no simplification can be made. */
7551 static tree
7552 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7554 tree type;
7555 tree res, fn, call;
7557 if (!validate_arg (arg0, REAL_TYPE)
7558 || !validate_arg (arg1, POINTER_TYPE)
7559 || !validate_arg (arg2, POINTER_TYPE))
7560 return NULL_TREE;
7562 type = TREE_TYPE (arg0);
7564 /* Calculate the result when the argument is a constant. */
7565 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7566 return res;
7568 /* Canonicalize sincos to cexpi. */
7569 if (!TARGET_C99_FUNCTIONS)
7570 return NULL_TREE;
7571 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7572 if (!fn)
7573 return NULL_TREE;
7575 call = build_call_expr (fn, 1, arg0);
7576 call = builtin_save_expr (call);
7578 return build2 (COMPOUND_EXPR, type,
7579 build2 (MODIFY_EXPR, void_type_node,
7580 build_fold_indirect_ref (arg1),
7581 build1 (IMAGPART_EXPR, type, call)),
7582 build2 (MODIFY_EXPR, void_type_node,
7583 build_fold_indirect_ref (arg2),
7584 build1 (REALPART_EXPR, type, call)));
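/* For illustration, on targets with a C99 runtime a call such as

       sincos (x, &s, &c);

   (x, s and c are just example names) is canonicalized to roughly

       __complex__ double t = cexpi (x);
       s = __imag__ t;
       c = __real__ t;

   so later passes only have to recognize one form.  */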
7587 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7588 NULL_TREE if no simplification can be made. */
7590 static tree
7591 fold_builtin_cexp (tree arg0, tree type)
7593 tree rtype;
7594 tree realp, imagp, ifn;
7596 if (!validate_arg (arg0, COMPLEX_TYPE))
7597 return NULL_TREE;
7599 rtype = TREE_TYPE (TREE_TYPE (arg0));
7601 /* If we can figure out the real part of arg0 and it is constant zero,
7602 fold to cexpi. */
7603 if (!TARGET_C99_FUNCTIONS)
7604 return NULL_TREE;
7605 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7606 if (!ifn)
7607 return NULL_TREE;
7609 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7610 && real_zerop (realp))
7612 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7613 return build_call_expr (ifn, 1, narg);
7616 /* If we can easily decompose the real and imaginary parts, split cexp
7617 into exp (r) * cexpi (i). */
7618 if (flag_unsafe_math_optimizations
7619 && realp)
7621 tree rfn, rcall, icall;
7623 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7624 if (!rfn)
7625 return NULL_TREE;
7627 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7628 if (!imagp)
7629 return NULL_TREE;
7631 icall = build_call_expr (ifn, 1, imagp);
7632 icall = builtin_save_expr (icall);
7633 rcall = build_call_expr (rfn, 1, realp);
7634 rcall = builtin_save_expr (rcall);
7635 return build2 (COMPLEX_EXPR, type,
7636 build2 (MULT_EXPR, rtype,
7637 rcall,
7638 build1 (REALPART_EXPR, rtype, icall)),
7639 build2 (MULT_EXPR, rtype,
7640 rcall,
7641 build1 (IMAGPART_EXPR, rtype, icall)));
7644 return NULL_TREE;
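/* For illustration: when the real part of the argument is known to be
   zero, cexp (0.0 + y * I) becomes cexpi (y); and under
   -funsafe-math-optimizations a decomposable cexp (r + i * I) is split
   into a COMPLEX_EXPR built from

       exp (r) * __real__ cexpi (i)   and   exp (r) * __imag__ cexpi (i)

   where r and i are the extracted real and imaginary parts.  */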
7647 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7648 Return NULL_TREE if no simplification can be made. */
7650 static tree
7651 fold_builtin_trunc (tree fndecl, tree arg)
7653 if (!validate_arg (arg, REAL_TYPE))
7654 return NULL_TREE;
7656 /* Optimize trunc of constant value. */
7657 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7659 REAL_VALUE_TYPE r, x;
7660 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7662 x = TREE_REAL_CST (arg);
7663 real_trunc (&r, TYPE_MODE (type), &x);
7664 return build_real (type, r);
7667 return fold_trunc_transparent_mathfn (fndecl, arg);
7670 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7671 Return NULL_TREE if no simplification can be made. */
7673 static tree
7674 fold_builtin_floor (tree fndecl, tree arg)
7676 if (!validate_arg (arg, REAL_TYPE))
7677 return NULL_TREE;
7679 /* Optimize floor of constant value. */
7680 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7682 REAL_VALUE_TYPE x;
7684 x = TREE_REAL_CST (arg);
7685 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7687 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7688 REAL_VALUE_TYPE r;
7690 real_floor (&r, TYPE_MODE (type), &x);
7691 return build_real (type, r);
7695 /* Fold floor (x) where x is nonnegative to trunc (x). */
7696 if (tree_expr_nonnegative_p (arg))
7698 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7699 if (truncfn)
7700 return build_call_expr (truncfn, 1, arg);
7703 return fold_trunc_transparent_mathfn (fndecl, arg);
7706 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7707 Return NULL_TREE if no simplification can be made. */
7709 static tree
7710 fold_builtin_ceil (tree fndecl, tree arg)
7712 if (!validate_arg (arg, REAL_TYPE))
7713 return NULL_TREE;
7715 /* Optimize ceil of constant value. */
7716 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7718 REAL_VALUE_TYPE x;
7720 x = TREE_REAL_CST (arg);
7721 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7723 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7724 REAL_VALUE_TYPE r;
7726 real_ceil (&r, TYPE_MODE (type), &x);
7727 return build_real (type, r);
7731 return fold_trunc_transparent_mathfn (fndecl, arg);
7734 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7735 Return NULL_TREE if no simplification can be made. */
7737 static tree
7738 fold_builtin_round (tree fndecl, tree arg)
7740 if (!validate_arg (arg, REAL_TYPE))
7741 return NULL_TREE;
7743 /* Optimize round of constant value. */
7744 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7746 REAL_VALUE_TYPE x;
7748 x = TREE_REAL_CST (arg);
7749 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7751 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7752 REAL_VALUE_TYPE r;
7754 real_round (&r, TYPE_MODE (type), &x);
7755 return build_real (type, r);
7759 return fold_trunc_transparent_mathfn (fndecl, arg);
7762 /* Fold function call to builtin lround, lroundf or lroundl (or the
7763 corresponding long long versions) and other rounding functions. ARG
7764 is the argument to the call. Return NULL_TREE if no simplification
7765 can be made. */
7767 static tree
7768 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7770 if (!validate_arg (arg, REAL_TYPE))
7771 return NULL_TREE;
7773 /* Optimize lround of constant value. */
7774 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7776 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7778 if (! REAL_VALUE_ISNAN (x) && ! REAL_VALUE_ISINF (x))
7780 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7781 tree ftype = TREE_TYPE (arg);
7782 unsigned HOST_WIDE_INT lo2;
7783 HOST_WIDE_INT hi, lo;
7784 REAL_VALUE_TYPE r;
7786 switch (DECL_FUNCTION_CODE (fndecl))
7788 CASE_FLT_FN (BUILT_IN_LFLOOR):
7789 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7790 real_floor (&r, TYPE_MODE (ftype), &x);
7791 break;
7793 CASE_FLT_FN (BUILT_IN_LCEIL):
7794 CASE_FLT_FN (BUILT_IN_LLCEIL):
7795 real_ceil (&r, TYPE_MODE (ftype), &x);
7796 break;
7798 CASE_FLT_FN (BUILT_IN_LROUND):
7799 CASE_FLT_FN (BUILT_IN_LLROUND):
7800 real_round (&r, TYPE_MODE (ftype), &x);
7801 break;
7803 default:
7804 gcc_unreachable ();
7807 REAL_VALUE_TO_INT (&lo, &hi, r);
7808 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7809 return build_int_cst_wide (itype, lo2, hi);
7813 switch (DECL_FUNCTION_CODE (fndecl))
7815 CASE_FLT_FN (BUILT_IN_LFLOOR):
7816 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7817 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7818 if (tree_expr_nonnegative_p (arg))
7819 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
7820 arg);
7821 break;
7822 default:;
7825 return fold_fixed_mathfn (fndecl, arg);
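/* For illustration, with constant arguments the folds above evaluate
   e.g. lround (2.5) to 3 and lfloor (-1.25) to -2 at compile time,
   while lfloor (x) with x known to be nonnegative becomes a plain
   FIX_TRUNC_EXPR (a (long)-style truncation) of x.  */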
7828 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7829 and their long and long long variants (e.g. ffsl and ffsll). ARG is
7830 the argument to the call. Return NULL_TREE if no simplification can
7831 be made. */
7833 static tree
7834 fold_builtin_bitop (tree fndecl, tree arg)
7836 if (!validate_arg (arg, INTEGER_TYPE))
7837 return NULL_TREE;
7839 /* Optimize for constant argument. */
7840 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7842 HOST_WIDE_INT hi, width, result;
7843 unsigned HOST_WIDE_INT lo;
7844 tree type;
7846 type = TREE_TYPE (arg);
7847 width = TYPE_PRECISION (type);
7848 lo = TREE_INT_CST_LOW (arg);
7850 /* Clear all the bits that are beyond the type's precision. */
7851 if (width > HOST_BITS_PER_WIDE_INT)
7853 hi = TREE_INT_CST_HIGH (arg);
7854 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7855 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7857 else
7859 hi = 0;
7860 if (width < HOST_BITS_PER_WIDE_INT)
7861 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7864 switch (DECL_FUNCTION_CODE (fndecl))
7866 CASE_INT_FN (BUILT_IN_FFS):
7867 if (lo != 0)
7868 result = exact_log2 (lo & -lo) + 1;
7869 else if (hi != 0)
7870 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7871 else
7872 result = 0;
7873 break;
7875 CASE_INT_FN (BUILT_IN_CLZ):
7876 if (hi != 0)
7877 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7878 else if (lo != 0)
7879 result = width - floor_log2 (lo) - 1;
7880 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7881 result = width;
7882 break;
7884 CASE_INT_FN (BUILT_IN_CTZ):
7885 if (lo != 0)
7886 result = exact_log2 (lo & -lo);
7887 else if (hi != 0)
7888 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7889 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7890 result = width;
7891 break;
7893 CASE_INT_FN (BUILT_IN_POPCOUNT):
7894 result = 0;
7895 while (lo)
7896 result++, lo &= lo - 1;
7897 while (hi)
7898 result++, hi &= hi - 1;
7899 break;
7901 CASE_INT_FN (BUILT_IN_PARITY):
7902 result = 0;
7903 while (lo)
7904 result++, lo &= lo - 1;
7905 while (hi)
7906 result++, hi &= hi - 1;
7907 result &= 1;
7908 break;
7910 default:
7911 gcc_unreachable ();
7914 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7917 return NULL_TREE;
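/* For illustration, with constant arguments the folds above evaluate
   e.g.

       __builtin_ffs (0x18)       ->  4
       __builtin_ctz (0x18)       ->  3
       __builtin_popcount (0xff)  ->  8
       __builtin_parity (0x7)     ->  1

   entirely at compile time.  */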
7920 /* Fold a call to __builtin_bswap32 or __builtin_bswap64 with argument ARG.
7921 Return NULL_TREE if no simplification can be made. */
7922 static tree
7923 fold_builtin_bswap (tree fndecl, tree arg)
7925 if (! validate_arg (arg, INTEGER_TYPE))
7926 return NULL_TREE;
7928 /* Optimize constant value. */
7929 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7931 HOST_WIDE_INT hi, width, r_hi = 0;
7932 unsigned HOST_WIDE_INT lo, r_lo = 0;
7933 tree type;
7935 type = TREE_TYPE (arg);
7936 width = TYPE_PRECISION (type);
7937 lo = TREE_INT_CST_LOW (arg);
7938 hi = TREE_INT_CST_HIGH (arg);
7940 switch (DECL_FUNCTION_CODE (fndecl))
7942 case BUILT_IN_BSWAP32:
7943 case BUILT_IN_BSWAP64:
7945 int s;
7947 for (s = 0; s < width; s += 8)
7949 int d = width - s - 8;
7950 unsigned HOST_WIDE_INT byte;
7952 if (s < HOST_BITS_PER_WIDE_INT)
7953 byte = (lo >> s) & 0xff;
7954 else
7955 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7957 if (d < HOST_BITS_PER_WIDE_INT)
7958 r_lo |= byte << d;
7959 else
7960 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7964 break;
7966 default:
7967 gcc_unreachable ();
7970 if (width < HOST_BITS_PER_WIDE_INT)
7971 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7972 else
7973 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7976 return NULL_TREE;
7979 /* Return true if EXPR is the real constant VALUE, or a complex constant with that real part and zero imaginary part. */
7981 static bool
7982 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
7984 STRIP_NOPS (expr);
7986 return ((TREE_CODE (expr) == REAL_CST
7987 && !TREE_OVERFLOW (expr)
7988 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
7989 || (TREE_CODE (expr) == COMPLEX_CST
7990 && real_dconstp (TREE_REALPART (expr), value)
7991 && real_zerop (TREE_IMAGPART (expr))));
7994 /* A subroutine of fold_builtin to fold the various logarithmic
7995 functions. Return NULL_TREE if no simplification can be made.
7996 FUNC is the corresponding MPFR logarithm function. */
7998 static tree
7999 fold_builtin_logarithm (tree fndecl, tree arg,
8000 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8002 if (validate_arg (arg, REAL_TYPE))
8004 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8005 tree res;
8006 const enum built_in_function fcode = builtin_mathfn_code (arg);
8008 /* Optimize log(e) = 1.0. We're never passed an exact 'e';
8009 instead we'll look for 'e' truncated to MODE. So only do
8010 this if flag_unsafe_math_optimizations is set. */
8011 if (flag_unsafe_math_optimizations && func == mpfr_log)
8013 const REAL_VALUE_TYPE e_truncated =
8014 real_value_truncate (TYPE_MODE (type), dconste);
8015 if (real_dconstp (arg, &e_truncated))
8016 return build_real (type, dconst1);
8019 /* Calculate the result when the argument is a constant. */
8020 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8021 return res;
8023 /* Special case, optimize logN(expN(x)) = x. */
8024 if (flag_unsafe_math_optimizations
8025 && ((func == mpfr_log
8026 && (fcode == BUILT_IN_EXP
8027 || fcode == BUILT_IN_EXPF
8028 || fcode == BUILT_IN_EXPL))
8029 || (func == mpfr_log2
8030 && (fcode == BUILT_IN_EXP2
8031 || fcode == BUILT_IN_EXP2F
8032 || fcode == BUILT_IN_EXP2L))
8033 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8034 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8036 /* Optimize logN(func()) for various exponential functions. We
8037 want to determine the value "x" and the power "exponent" in
8038 order to transform logN(x**exponent) into exponent*logN(x). */
8039 if (flag_unsafe_math_optimizations)
8041 tree exponent = 0, x = 0;
8043 switch (fcode)
8045 CASE_FLT_FN (BUILT_IN_EXP):
8046 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8047 x = build_real (type,
8048 real_value_truncate (TYPE_MODE (type), dconste));
8049 exponent = CALL_EXPR_ARG (arg, 0);
8050 break;
8051 CASE_FLT_FN (BUILT_IN_EXP2):
8052 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8053 x = build_real (type, dconst2);
8054 exponent = CALL_EXPR_ARG (arg, 0);
8055 break;
8056 CASE_FLT_FN (BUILT_IN_EXP10):
8057 CASE_FLT_FN (BUILT_IN_POW10):
8058 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8059 x = build_real (type, dconst10);
8060 exponent = CALL_EXPR_ARG (arg, 0);
8061 break;
8062 CASE_FLT_FN (BUILT_IN_SQRT):
8063 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8064 x = CALL_EXPR_ARG (arg, 0);
8065 exponent = build_real (type, dconsthalf);
8066 break;
8067 CASE_FLT_FN (BUILT_IN_CBRT):
8068 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8069 x = CALL_EXPR_ARG (arg, 0);
8070 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8071 dconstthird));
8072 break;
8073 CASE_FLT_FN (BUILT_IN_POW):
8074 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8075 x = CALL_EXPR_ARG (arg, 0);
8076 exponent = CALL_EXPR_ARG (arg, 1);
8077 break;
8078 default:
8079 break;
8082 /* Now perform the optimization. */
8083 if (x && exponent)
8085 tree logfn = build_call_expr (fndecl, 1, x);
8086 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8091 return NULL_TREE;
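/* For illustration, under -funsafe-math-optimizations the folds above
   rewrite (x and y are arbitrary real operands):

       log (exp (x))      ->  x
       log2 (exp2 (x))    ->  x
       log (sqrt (x))     ->  0.5 * log (x)
       log (pow (x, y))   ->  y * log (x)  */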
8094 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8095 NULL_TREE if no simplification can be made. */
8097 static tree
8098 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8100 tree res, narg0, narg1;
8102 if (!validate_arg (arg0, REAL_TYPE)
8103 || !validate_arg (arg1, REAL_TYPE))
8104 return NULL_TREE;
8106 /* Calculate the result when the argument is a constant. */
8107 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8108 return res;
8110 /* If either argument to hypot has a negate or abs, strip that off.
8111 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8112 narg0 = fold_strip_sign_ops (arg0);
8113 narg1 = fold_strip_sign_ops (arg1);
8114 if (narg0 || narg1)
8116 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8117 narg1 ? narg1 : arg1);
8120 /* If either argument is zero, hypot is fabs of the other. */
8121 if (real_zerop (arg0))
8122 return fold_build1 (ABS_EXPR, type, arg1);
8123 else if (real_zerop (arg1))
8124 return fold_build1 (ABS_EXPR, type, arg0);
8126 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8127 if (flag_unsafe_math_optimizations
8128 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8130 const REAL_VALUE_TYPE sqrt2_trunc
8131 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
8132 return fold_build2 (MULT_EXPR, type,
8133 fold_build1 (ABS_EXPR, type, arg0),
8134 build_real (type, sqrt2_trunc));
8137 return NULL_TREE;
8141 /* Fold a builtin function call to pow, powf, or powl. Return
8142 NULL_TREE if no simplification can be made. */
8143 static tree
8144 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8146 tree res;
8148 if (!validate_arg (arg0, REAL_TYPE)
8149 || !validate_arg (arg1, REAL_TYPE))
8150 return NULL_TREE;
8152 /* Calculate the result when the argument is a constant. */
8153 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8154 return res;
8156 /* Optimize pow(1.0,y) = 1.0. */
8157 if (real_onep (arg0))
8158 return omit_one_operand (type, build_real (type, dconst1), arg1);
8160 if (TREE_CODE (arg1) == REAL_CST
8161 && !TREE_OVERFLOW (arg1))
8163 REAL_VALUE_TYPE cint;
8164 REAL_VALUE_TYPE c;
8165 HOST_WIDE_INT n;
8167 c = TREE_REAL_CST (arg1);
8169 /* Optimize pow(x,0.0) = 1.0. */
8170 if (REAL_VALUES_EQUAL (c, dconst0))
8171 return omit_one_operand (type, build_real (type, dconst1),
8172 arg0);
8174 /* Optimize pow(x,1.0) = x. */
8175 if (REAL_VALUES_EQUAL (c, dconst1))
8176 return arg0;
8178 /* Optimize pow(x,-1.0) = 1.0/x. */
8179 if (REAL_VALUES_EQUAL (c, dconstm1))
8180 return fold_build2 (RDIV_EXPR, type,
8181 build_real (type, dconst1), arg0);
8183 /* Optimize pow(x,0.5) = sqrt(x). */
8184 if (flag_unsafe_math_optimizations
8185 && REAL_VALUES_EQUAL (c, dconsthalf))
8187 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8189 if (sqrtfn != NULL_TREE)
8190 return build_call_expr (sqrtfn, 1, arg0);
8193 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8194 if (flag_unsafe_math_optimizations)
8196 const REAL_VALUE_TYPE dconstroot
8197 = real_value_truncate (TYPE_MODE (type), dconstthird);
8199 if (REAL_VALUES_EQUAL (c, dconstroot))
8201 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8202 if (cbrtfn != NULL_TREE)
8203 return build_call_expr (cbrtfn, 1, arg0);
8207 /* Check for an integer exponent. */
8208 n = real_to_integer (&c);
8209 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8210 if (real_identical (&c, &cint))
8212 /* Attempt to evaluate pow at compile-time. */
8213 if (TREE_CODE (arg0) == REAL_CST
8214 && !TREE_OVERFLOW (arg0))
8216 REAL_VALUE_TYPE x;
8217 bool inexact;
8219 x = TREE_REAL_CST (arg0);
8220 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8221 if (flag_unsafe_math_optimizations || !inexact)
8222 return build_real (type, x);
8225 /* Strip sign ops from even integer powers. */
8226 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8228 tree narg0 = fold_strip_sign_ops (arg0);
8229 if (narg0)
8230 return build_call_expr (fndecl, 2, narg0, arg1);
8235 if (flag_unsafe_math_optimizations)
8237 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8239 /* Optimize pow(expN(x),y) = expN(x*y). */
8240 if (BUILTIN_EXPONENT_P (fcode))
8242 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8243 tree arg = CALL_EXPR_ARG (arg0, 0);
8244 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8245 return build_call_expr (expfn, 1, arg);
8248 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8249 if (BUILTIN_SQRT_P (fcode))
8251 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8252 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8253 build_real (type, dconsthalf));
8254 return build_call_expr (fndecl, 2, narg0, narg1);
8257 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8258 if (BUILTIN_CBRT_P (fcode))
8260 tree arg = CALL_EXPR_ARG (arg0, 0);
8261 if (tree_expr_nonnegative_p (arg))
8263 const REAL_VALUE_TYPE dconstroot
8264 = real_value_truncate (TYPE_MODE (type), dconstthird);
8265 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8266 build_real (type, dconstroot));
8267 return build_call_expr (fndecl, 2, arg, narg1);
8271 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8272 if (fcode == BUILT_IN_POW
8273 || fcode == BUILT_IN_POWF
8274 || fcode == BUILT_IN_POWL)
8276 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8277 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8278 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8279 return build_call_expr (fndecl, 2, arg00, narg1);
8283 return NULL_TREE;
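/* For illustration: pow (x, 0.0) folds to 1.0 and pow (x, 1.0) to x
   unconditionally; under -funsafe-math-optimizations pow (x, 0.5)
   becomes sqrt (x), pow (x, 1.0/3.0) becomes cbrt (x), and
   pow (pow (x, y), z) collapses to pow (x, y * z), where x, y and z
   are arbitrary real operands.  */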
8286 /* Fold a builtin function call to powi, powif, or powil with arguments ARG0 and ARG1.
8287 Return NULL_TREE if no simplification can be made. */
8288 static tree
8289 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8290 tree arg0, tree arg1, tree type)
8292 if (!validate_arg (arg0, REAL_TYPE)
8293 || !validate_arg (arg1, INTEGER_TYPE))
8294 return NULL_TREE;
8296 /* Optimize pow(1.0,y) = 1.0. */
8297 if (real_onep (arg0))
8298 return omit_one_operand (type, build_real (type, dconst1), arg1);
8300 if (host_integerp (arg1, 0))
8302 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8304 /* Evaluate powi at compile-time. */
8305 if (TREE_CODE (arg0) == REAL_CST
8306 && !TREE_OVERFLOW (arg0))
8308 REAL_VALUE_TYPE x;
8309 x = TREE_REAL_CST (arg0);
8310 real_powi (&x, TYPE_MODE (type), &x, c);
8311 return build_real (type, x);
8314 /* Optimize pow(x,0) = 1.0. */
8315 if (c == 0)
8316 return omit_one_operand (type, build_real (type, dconst1),
8317 arg0);
8319 /* Optimize pow(x,1) = x. */
8320 if (c == 1)
8321 return arg0;
8323 /* Optimize pow(x,-1) = 1.0/x. */
8324 if (c == -1)
8325 return fold_build2 (RDIV_EXPR, type,
8326 build_real (type, dconst1), arg0);
8329 return NULL_TREE;
8332 /* A subroutine of fold_builtin to fold the various exponent
8333 functions. Return NULL_TREE if no simplification can be made.
8334 FUNC is the corresponding MPFR exponent function. */
8336 static tree
8337 fold_builtin_exponent (tree fndecl, tree arg,
8338 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8340 if (validate_arg (arg, REAL_TYPE))
8342 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8343 tree res;
8345 /* Calculate the result when the argument is a constant. */
8346 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8347 return res;
8349 /* Optimize expN(logN(x)) = x. */
8350 if (flag_unsafe_math_optimizations)
8352 const enum built_in_function fcode = builtin_mathfn_code (arg);
8354 if ((func == mpfr_exp
8355 && (fcode == BUILT_IN_LOG
8356 || fcode == BUILT_IN_LOGF
8357 || fcode == BUILT_IN_LOGL))
8358 || (func == mpfr_exp2
8359 && (fcode == BUILT_IN_LOG2
8360 || fcode == BUILT_IN_LOG2F
8361 || fcode == BUILT_IN_LOG2L))
8362 || (func == mpfr_exp10
8363 && (fcode == BUILT_IN_LOG10
8364 || fcode == BUILT_IN_LOG10F
8365 || fcode == BUILT_IN_LOG10L)))
8366 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8370 return NULL_TREE;
8373 /* Return true if VAR is a VAR_DECL or a component thereof. */
8375 static bool
8376 var_decl_component_p (tree var)
8378 tree inner = var;
8379 while (handled_component_p (inner))
8380 inner = TREE_OPERAND (inner, 0);
8381 return SSA_VAR_P (inner);
8384 /* Fold function call to builtin memset. Return
8385 NULL_TREE if no simplification can be made. */
8387 static tree
8388 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8390 tree var, ret;
8391 unsigned HOST_WIDE_INT length, cval;
8393 if (! validate_arg (dest, POINTER_TYPE)
8394 || ! validate_arg (c, INTEGER_TYPE)
8395 || ! validate_arg (len, INTEGER_TYPE))
8396 return NULL_TREE;
8398 if (! host_integerp (len, 1))
8399 return NULL_TREE;
8401 /* If the LEN parameter is zero, return DEST. */
8402 if (integer_zerop (len))
8403 return omit_one_operand (type, dest, c);
8405 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8406 return NULL_TREE;
8408 var = dest;
8409 STRIP_NOPS (var);
8410 if (TREE_CODE (var) != ADDR_EXPR)
8411 return NULL_TREE;
8413 var = TREE_OPERAND (var, 0);
8414 if (TREE_THIS_VOLATILE (var))
8415 return NULL_TREE;
8417 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8418 && !POINTER_TYPE_P (TREE_TYPE (var)))
8419 return NULL_TREE;
8421 if (! var_decl_component_p (var))
8422 return NULL_TREE;
8424 length = tree_low_cst (len, 1);
8425 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8426 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8427 < (int) length)
8428 return NULL_TREE;
8430 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8431 return NULL_TREE;
8433 if (integer_zerop (c))
8434 cval = 0;
8435 else
8437 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8438 return NULL_TREE;
8440 cval = tree_low_cst (c, 1);
8441 cval &= 0xff;
8442 cval |= cval << 8;
8443 cval |= cval << 16;
8444 cval |= (cval << 31) << 1;
8447 ret = build_int_cst_type (TREE_TYPE (var), cval);
8448 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8449 if (ignore)
8450 return ret;
8452 return omit_one_operand (type, dest, ret);
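/* For illustration: for a 4-byte integer variable i (an example name),

       memset (&i, 0xab, sizeof (i));

   is folded into the single store i = 0xabababab (the filler byte
   replicated to the variable's width) rather than a library call,
   provided the size and alignment checks above succeed.  */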
8455 /* Fold function call to builtin bzero. Return
8456 NULL_TREE if no simplification can be made. */
8458 static tree
8459 fold_builtin_bzero (tree dest, tree size, bool ignore)
8461 if (! validate_arg (dest, POINTER_TYPE)
8462 || ! validate_arg (size, INTEGER_TYPE))
8463 return NULL_TREE;
8465 if (!ignore)
8466 return NULL_TREE;
8468 /* New argument list transforming bzero(ptr x, int y) to
8469 memset(ptr x, int 0, size_t y). This is done this way
8470 so that if it isn't expanded inline, we fall back to
8471 calling bzero instead of memset. */
8473 return fold_builtin_memset (dest, integer_zero_node,
8474 fold_convert (sizetype, size),
8475 void_type_node, ignore);
8478 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8479 NULL_TREE if no simplification can be made.
8480 If ENDP is 0, return DEST (like memcpy).
8481 If ENDP is 1, return DEST+LEN (like mempcpy).
8482 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8483 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8484 (memmove). */
8486 static tree
8487 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8489 tree destvar, srcvar, expr;
8491 if (! validate_arg (dest, POINTER_TYPE)
8492 || ! validate_arg (src, POINTER_TYPE)
8493 || ! validate_arg (len, INTEGER_TYPE))
8494 return NULL_TREE;
8496 /* If the LEN parameter is zero, return DEST. */
8497 if (integer_zerop (len))
8498 return omit_one_operand (type, dest, src);
8500 /* If SRC and DEST are the same (and not volatile), return
8501 DEST{,+LEN,+LEN-1}. */
8502 if (operand_equal_p (src, dest, 0))
8503 expr = len;
8504 else
8506 tree srctype, desttype;
8507 if (endp == 3)
8509 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8510 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8512 /* Both DEST and SRC must be pointer types.
8513 ??? This is what old code did. Is the testing for pointer types
8514 really mandatory?
8516 If either SRC is readonly or length is 1, we can use memcpy. */
8517 if (dest_align && src_align
8518 && (readonly_data_expr (src)
8519 || (host_integerp (len, 1)
8520 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8521 tree_low_cst (len, 1)))))
8523 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8524 if (!fn)
8525 return NULL_TREE;
8526 return build_call_expr (fn, 3, dest, src, len);
8528 return NULL_TREE;
8531 if (!host_integerp (len, 0))
8532 return NULL_TREE;
8533 /* FIXME:
8534 This logic loses for arguments like (type *)malloc (sizeof (type)),
8535 since we strip the casts from the VOID * return value of malloc.
8536 Perhaps we ought to inherit the type from the non-VOID argument here? */
8537 STRIP_NOPS (src);
8538 STRIP_NOPS (dest);
8539 srctype = TREE_TYPE (TREE_TYPE (src));
8540 desttype = TREE_TYPE (TREE_TYPE (dest));
8541 if (!srctype || !desttype
8542 || !TYPE_SIZE_UNIT (srctype)
8543 || !TYPE_SIZE_UNIT (desttype)
8544 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8545 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8546 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8547 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8548 return NULL_TREE;
8550 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8551 < (int) TYPE_ALIGN (desttype)
8552 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8553 < (int) TYPE_ALIGN (srctype)))
8554 return NULL_TREE;
8556 if (!ignore)
8557 dest = builtin_save_expr (dest);
8559 srcvar = build_fold_indirect_ref (src);
8560 if (TREE_THIS_VOLATILE (srcvar))
8561 return NULL_TREE;
8562 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8563 return NULL_TREE;
8564 /* With memcpy, it is possible to bypass aliasing rules, so without
8565 this check, e.g. execute/20060930-2.c would be misoptimized, because
8566 it uses a conflicting alias set to hold the argument for the memcpy call.
8567 This check is probably unnecessary with -fno-strict-aliasing.
8568 Similarly for destvar. See also PR29286. */
8569 if (!var_decl_component_p (srcvar)
8570 /* Accept: memcpy (*char_var, "test", 1); which simplifies
8571 to char_var='t'; */
8572 || is_gimple_min_invariant (srcvar)
8573 || readonly_data_expr (src))
8574 return NULL_TREE;
8576 destvar = build_fold_indirect_ref (dest);
8577 if (TREE_THIS_VOLATILE (destvar))
8578 return NULL_TREE;
8579 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8580 return NULL_TREE;
8581 if (!var_decl_component_p (destvar))
8582 return NULL_TREE;
8584 if (srctype == desttype
8585 || (gimple_in_ssa_p (cfun)
8586 && tree_ssa_useless_type_conversion_1 (desttype, srctype)))
8587 expr = srcvar;
8588 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8589 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8590 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8591 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8592 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8593 else
8594 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8595 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8598 if (ignore)
8599 return expr;
8601 if (endp == 0 || endp == 3)
8602 return omit_one_operand (type, dest, expr);
8604 if (expr == len)
8605 expr = NULL_TREE;
8607 if (endp == 2)
8608 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8609 ssize_int (1));
8611 len = fold_convert (TREE_TYPE (dest), len);
8612 dest = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
8613 dest = fold_convert (type, dest);
8614 if (expr)
8615 dest = omit_one_operand (type, dest, expr);
8616 return dest;
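/* For illustration: with two variables a and b of the same struct type
   (example names), and LEN equal to that type's size,

       memcpy (&a, &b, sizeof (a));

   is folded into the plain assignment a = b when the alignment checks
   above succeed; for mempcpy the value returned is roughly
   (char *) &a + sizeof (a) instead of &a.  */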
8619 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8620 If LEN is not NULL, it represents the length of the string to be
8621 copied. Return NULL_TREE if no simplification can be made. */
8623 tree
8624 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8626 tree fn;
8628 if (!validate_arg (dest, POINTER_TYPE)
8629 || !validate_arg (src, POINTER_TYPE))
8630 return NULL_TREE;
8632 /* If SRC and DEST are the same (and not volatile), return DEST. */
8633 if (operand_equal_p (src, dest, 0))
8634 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8636 if (optimize_size)
8637 return NULL_TREE;
8639 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8640 if (!fn)
8641 return NULL_TREE;
8643 if (!len)
8645 len = c_strlen (src, 1);
8646 if (! len || TREE_SIDE_EFFECTS (len))
8647 return NULL_TREE;
8650 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8651 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8652 build_call_expr (fn, 3, dest, src, len));
8655 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8656 If SLEN is not NULL, it represents the length of the source string.
8657 Return NULL_TREE if no simplification can be made. */
8659 tree
8660 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8662 tree fn;
8664 if (!validate_arg (dest, POINTER_TYPE)
8665 || !validate_arg (src, POINTER_TYPE)
8666 || !validate_arg (len, INTEGER_TYPE))
8667 return NULL_TREE;
8669 /* If the LEN parameter is zero, return DEST. */
8670 if (integer_zerop (len))
8671 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8673 /* We can't compare slen with len as constants below if len is not a
8674 constant. */
8675 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8676 return NULL_TREE;
8678 if (!slen)
8679 slen = c_strlen (src, 1);
8681 /* Now, we must be passed a constant src ptr parameter. */
8682 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8683 return NULL_TREE;
8685 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8687 /* We do not support simplification of this case, though we do
8688 support it when expanding trees into RTL. */
8689 /* FIXME: generate a call to __builtin_memset. */
8690 if (tree_int_cst_lt (slen, len))
8691 return NULL_TREE;
8693 /* OK, transform into builtin memcpy. */
8694 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8695 if (!fn)
8696 return NULL_TREE;
8697 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8698 build_call_expr (fn, 3, dest, src, len));
8701 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8702 arguments to the call, and TYPE is its return type.
8703 Return NULL_TREE if no simplification can be made. */
8705 static tree
8706 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8708 if (!validate_arg (arg1, POINTER_TYPE)
8709 || !validate_arg (arg2, INTEGER_TYPE)
8710 || !validate_arg (len, INTEGER_TYPE))
8711 return NULL_TREE;
8712 else
8714 const char *p1;
8716 if (TREE_CODE (arg2) != INTEGER_CST
8717 || !host_integerp (len, 1))
8718 return NULL_TREE;
8720 p1 = c_getstr (arg1);
8721 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8723 char c;
8724 const char *r;
8725 tree tem;
8727 if (target_char_cast (arg2, &c))
8728 return NULL_TREE;
8730 r = memchr (p1, c, tree_low_cst (len, 1));
8732 if (r == NULL)
8733 return build_int_cst (TREE_TYPE (arg1), 0);
8735 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (arg1), arg1,
8736 build_int_cst (TREE_TYPE (arg1), r - p1));
8737 return fold_convert (type, tem);
8739 return NULL_TREE;
8743 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
8744 Return NULL_TREE if no simplification can be made. */
8746 static tree
8747 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8749 const char *p1, *p2;
8751 if (!validate_arg (arg1, POINTER_TYPE)
8752 || !validate_arg (arg2, POINTER_TYPE)
8753 || !validate_arg (len, INTEGER_TYPE))
8754 return NULL_TREE;
8756 /* If the LEN parameter is zero, return zero. */
8757 if (integer_zerop (len))
8758 return omit_two_operands (integer_type_node, integer_zero_node,
8759 arg1, arg2);
8761 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8762 if (operand_equal_p (arg1, arg2, 0))
8763 return omit_one_operand (integer_type_node, integer_zero_node, len);
8765 p1 = c_getstr (arg1);
8766 p2 = c_getstr (arg2);
8768 /* If all arguments are constant, and the value of len is not greater
8769 than the lengths of arg1 and arg2, evaluate at compile-time. */
8770 if (host_integerp (len, 1) && p1 && p2
8771 && compare_tree_int (len, strlen (p1) + 1) <= 0
8772 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8774 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8776 if (r > 0)
8777 return integer_one_node;
8778 else if (r < 0)
8779 return integer_minus_one_node;
8780 else
8781 return integer_zero_node;
8784 /* If len parameter is one, return an expression corresponding to
8785 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8786 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8788 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8789 tree cst_uchar_ptr_node
8790 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8792 tree ind1 = fold_convert (integer_type_node,
8793 build1 (INDIRECT_REF, cst_uchar_node,
8794 fold_convert (cst_uchar_ptr_node,
8795 arg1)));
8796 tree ind2 = fold_convert (integer_type_node,
8797 build1 (INDIRECT_REF, cst_uchar_node,
8798 fold_convert (cst_uchar_ptr_node,
8799 arg2)));
8800 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8803 return NULL_TREE;
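/* For illustration, with constant operands the folds above evaluate
   e.g. memcmp ("abc", "abd", 3) to -1 at compile time (results are
   normalized to -1, 0 or 1), and a length-one memcmp (p, q, 1) becomes
   the byte difference

       *(const unsigned char *) p - *(const unsigned char *) q;  */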
8806 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8807 Return NULL_TREE if no simplification can be made. */
8809 static tree
8810 fold_builtin_strcmp (tree arg1, tree arg2)
8812 const char *p1, *p2;
8814 if (!validate_arg (arg1, POINTER_TYPE)
8815 || !validate_arg (arg2, POINTER_TYPE))
8816 return NULL_TREE;
8818 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8819 if (operand_equal_p (arg1, arg2, 0))
8820 return integer_zero_node;
8822 p1 = c_getstr (arg1);
8823 p2 = c_getstr (arg2);
8825 if (p1 && p2)
8827 const int i = strcmp (p1, p2);
8828 if (i < 0)
8829 return integer_minus_one_node;
8830 else if (i > 0)
8831 return integer_one_node;
8832 else
8833 return integer_zero_node;
8836 /* If the second arg is "", return *(const unsigned char*)arg1. */
8837 if (p2 && *p2 == '\0')
8839 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8840 tree cst_uchar_ptr_node
8841 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8843 return fold_convert (integer_type_node,
8844 build1 (INDIRECT_REF, cst_uchar_node,
8845 fold_convert (cst_uchar_ptr_node,
8846 arg1)));
8849 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8850 if (p1 && *p1 == '\0')
8852 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8853 tree cst_uchar_ptr_node
8854 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8856 tree temp = fold_convert (integer_type_node,
8857 build1 (INDIRECT_REF, cst_uchar_node,
8858 fold_convert (cst_uchar_ptr_node,
8859 arg2)));
8860 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8863 return NULL_TREE;
8866 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8867 Return NULL_TREE if no simplification can be made. */
8869 static tree
8870 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
8872 const char *p1, *p2;
8874 if (!validate_arg (arg1, POINTER_TYPE)
8875 || !validate_arg (arg2, POINTER_TYPE)
8876 || !validate_arg (len, INTEGER_TYPE))
8877 return NULL_TREE;
8879 /* If the LEN parameter is zero, return zero. */
8880 if (integer_zerop (len))
8881 return omit_two_operands (integer_type_node, integer_zero_node,
8882 arg1, arg2);
8884 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8885 if (operand_equal_p (arg1, arg2, 0))
8886 return omit_one_operand (integer_type_node, integer_zero_node, len);
8888 p1 = c_getstr (arg1);
8889 p2 = c_getstr (arg2);
8891 if (host_integerp (len, 1) && p1 && p2)
8893 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8894 if (i > 0)
8895 return integer_one_node;
8896 else if (i < 0)
8897 return integer_minus_one_node;
8898 else
8899 return integer_zero_node;
8902 /* If the second arg is "", and the length is greater than zero,
8903 return *(const unsigned char*)arg1. */
8904 if (p2 && *p2 == '\0'
8905 && TREE_CODE (len) == INTEGER_CST
8906 && tree_int_cst_sgn (len) == 1)
8908 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8909 tree cst_uchar_ptr_node
8910 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8912 return fold_convert (integer_type_node,
8913 build1 (INDIRECT_REF, cst_uchar_node,
8914 fold_convert (cst_uchar_ptr_node,
8915 arg1)));
8918 /* If the first arg is "", and the length is greater than zero,
8919 return -*(const unsigned char*)arg2. */
8920 if (p1 && *p1 == '\0'
8921 && TREE_CODE (len) == INTEGER_CST
8922 && tree_int_cst_sgn (len) == 1)
8924 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8925 tree cst_uchar_ptr_node
8926 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8928 tree temp = fold_convert (integer_type_node,
8929 build1 (INDIRECT_REF, cst_uchar_node,
8930 fold_convert (cst_uchar_ptr_node,
8931 arg2)));
8932 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8935 /* If len parameter is one, return an expression corresponding to
8936 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8937 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8939 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8940 tree cst_uchar_ptr_node
8941 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8943 tree ind1 = fold_convert (integer_type_node,
8944 build1 (INDIRECT_REF, cst_uchar_node,
8945 fold_convert (cst_uchar_ptr_node,
8946 arg1)));
8947 tree ind2 = fold_convert (integer_type_node,
8948 build1 (INDIRECT_REF, cst_uchar_node,
8949 fold_convert (cst_uchar_ptr_node,
8950 arg2)));
8951 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8954 return NULL_TREE;
8957 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8958 ARG. Return NULL_TREE if no simplification can be made. */
8960 static tree
8961 fold_builtin_signbit (tree arg, tree type)
8963 tree temp;
8965 if (!validate_arg (arg, REAL_TYPE))
8966 return NULL_TREE;
8968 /* If ARG is a compile-time constant, determine the result. */
8969 if (TREE_CODE (arg) == REAL_CST
8970 && !TREE_OVERFLOW (arg))
8972 REAL_VALUE_TYPE c;
8974 c = TREE_REAL_CST (arg);
8975 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8976 return fold_convert (type, temp);
8979 /* If ARG is non-negative, the result is always zero. */
8980 if (tree_expr_nonnegative_p (arg))
8981 return omit_one_operand (type, integer_zero_node, arg);
8983 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8984 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8985 return fold_build2 (LT_EXPR, type, arg,
8986 build_real (TREE_TYPE (arg), dconst0));
8988 return NULL_TREE;
8991 /* Fold function call to builtin copysign, copysignf or copysignl with
8992 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8993 be made. */
8995 static tree
8996 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
8998 tree tem;
9000 if (!validate_arg (arg1, REAL_TYPE)
9001 || !validate_arg (arg2, REAL_TYPE))
9002 return NULL_TREE;
9004 /* copysign(X,X) is X. */
9005 if (operand_equal_p (arg1, arg2, 0))
9006 return fold_convert (type, arg1);
9008 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9009 if (TREE_CODE (arg1) == REAL_CST
9010 && TREE_CODE (arg2) == REAL_CST
9011 && !TREE_OVERFLOW (arg1)
9012 && !TREE_OVERFLOW (arg2))
9014 REAL_VALUE_TYPE c1, c2;
9016 c1 = TREE_REAL_CST (arg1);
9017 c2 = TREE_REAL_CST (arg2);
9018 /* c1.sign := c2.sign. */
9019 real_copysign (&c1, &c2);
9020 return build_real (type, c1);
9023 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9024 Remember to evaluate Y for side-effects. */
9025 if (tree_expr_nonnegative_p (arg2))
9026 return omit_one_operand (type,
9027 fold_build1 (ABS_EXPR, type, arg1),
9028 arg2);
9030 /* Strip sign changing operations for the first argument. */
9031 tem = fold_strip_sign_ops (arg1);
9032 if (tem)
9033 return build_call_expr (fndecl, 2, tem, arg2);
9035 return NULL_TREE;
9038 /* Fold a call to builtin isascii with argument ARG. */
9040 static tree
9041 fold_builtin_isascii (tree arg)
9043 if (!validate_arg (arg, INTEGER_TYPE))
9044 return NULL_TREE;
9045 else
9047 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9048 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9049 build_int_cst (NULL_TREE,
9050 ~ (unsigned HOST_WIDE_INT) 0x7f));
9051 return fold_build2 (EQ_EXPR, integer_type_node,
9052 arg, integer_zero_node);
9056 /* Fold a call to builtin toascii with argument ARG. */
9058 static tree
9059 fold_builtin_toascii (tree arg)
9061 if (!validate_arg (arg, INTEGER_TYPE))
9062 return NULL_TREE;
9064 /* Transform toascii(c) -> (c & 0x7f). */
9065 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9066 build_int_cst (NULL_TREE, 0x7f));
9069 /* Fold a call to builtin isdigit with argument ARG. */
9071 static tree
9072 fold_builtin_isdigit (tree arg)
9074 if (!validate_arg (arg, INTEGER_TYPE))
9075 return NULL_TREE;
9076 else
9078 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9079 /* According to the C standard, isdigit is unaffected by locale.
9080 However, it definitely is affected by the target character set. */
9081 unsigned HOST_WIDE_INT target_digit0
9082 = lang_hooks.to_target_charset ('0');
9084 if (target_digit0 == 0)
9085 return NULL_TREE;
9087 arg = fold_convert (unsigned_type_node, arg);
9088 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9089 build_int_cst (unsigned_type_node, target_digit0));
9090 return fold_build2 (LE_EXPR, integer_type_node, arg,
9091 build_int_cst (unsigned_type_node, 9));
9095 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9097 static tree
9098 fold_builtin_fabs (tree arg, tree type)
9100 if (!validate_arg (arg, REAL_TYPE))
9101 return NULL_TREE;
9103 arg = fold_convert (type, arg);
9104 if (TREE_CODE (arg) == REAL_CST)
9105 return fold_abs_const (arg, type);
9106 return fold_build1 (ABS_EXPR, type, arg);
9109 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9111 static tree
9112 fold_builtin_abs (tree arg, tree type)
9114 if (!validate_arg (arg, INTEGER_TYPE))
9115 return NULL_TREE;
9117 arg = fold_convert (type, arg);
9118 if (TREE_CODE (arg) == INTEGER_CST)
9119 return fold_abs_const (arg, type);
9120 return fold_build1 (ABS_EXPR, type, arg);
9123 /* Fold a call to builtin fmin or fmax. */
9125 static tree
9126 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9128 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9130 /* Calculate the result when the argument is a constant. */
9131 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9133 if (res)
9134 return res;
9136 /* If either argument is NaN, return the other one. Avoid the
9137 transformation if we get (and honor) a signalling NaN. Using
9138 omit_one_operand() ensures we create a non-lvalue. */
9139 if (TREE_CODE (arg0) == REAL_CST
9140 && real_isnan (&TREE_REAL_CST (arg0))
9141 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9142 || ! TREE_REAL_CST (arg0).signalling))
9143 return omit_one_operand (type, arg1, arg0);
9144 if (TREE_CODE (arg1) == REAL_CST
9145 && real_isnan (&TREE_REAL_CST (arg1))
9146 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9147 || ! TREE_REAL_CST (arg1).signalling))
9148 return omit_one_operand (type, arg0, arg1);
9150 /* Transform fmin/fmax(x,x) -> x. */
9151 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9152 return omit_one_operand (type, arg0, arg1);
9154 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9155 functions to return the numeric arg if the other one is NaN.
9156 These tree codes don't honor that, so only transform if
9157 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9158 handled, so we don't have to worry about it either. */
9159 if (flag_finite_math_only)
9160 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9161 fold_convert (type, arg0),
9162 fold_convert (type, arg1));
9164 return NULL_TREE;
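/* For illustration: fmin (x, x) folds to x, fmin (x, __builtin_nan (""))
   folds to x because the quiet NaN operand is dropped, and with
   -ffinite-math-only fmax (x, y) becomes the bare MAX_EXPR <x, y>
   (x and y are arbitrary real operands).  */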
9167 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9169 static tree
9170 fold_builtin_carg (tree arg, tree type)
9172 if (validate_arg (arg, COMPLEX_TYPE))
9174 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9176 if (atan2_fn)
9178 tree new_arg = builtin_save_expr (arg);
9179 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9180 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9181 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9185 return NULL_TREE;
9188 /* Fold a call to builtin logb/ilogb. */
9190 static tree
9191 fold_builtin_logb (tree arg, tree rettype)
9193 if (! validate_arg (arg, REAL_TYPE))
9194 return NULL_TREE;
9196 STRIP_NOPS (arg);
9198 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9200 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9202 switch (value->cl)
9204 case rvc_nan:
9205 case rvc_inf:
9206 /* If arg is Inf or NaN and we're logb, return it. */
9207 if (TREE_CODE (rettype) == REAL_TYPE)
9208 return fold_convert (rettype, arg);
9209 /* Fall through... */
9210 case rvc_zero:
9211 /* Zero may set errno and/or raise an exception for logb; also,
9212 for ilogb we don't know FP_ILOGB0. */
9213 return NULL_TREE;
9214 case rvc_normal:
9215 /* For normal numbers, proceed iff radix == 2. In GCC,
9216 normalized significands are in the range [0.5, 1.0). We
9217 want the exponent as if they were [1.0, 2.0) so get the
9218 exponent and subtract 1. */
9219 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9220 return fold_convert (rettype, build_int_cst (NULL_TREE,
9221 REAL_EXP (value)-1));
9222 break;
9226 return NULL_TREE;
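/* Worked example of the constant folding above, assuming a radix-2
   target format: 8.0 is represented as 0.5 * 2**4, so REAL_EXP is 4 and

       logb (8.0)   ==>  3.0
       ilogb (8.0)  ==>  3

   logb (NaN) and logb (Inf) are returned unchanged, while a zero
   argument is left alone since it may set errno or raise an exception.  */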
9229 /* Fold a call to builtin significand, if radix == 2. */
9231 static tree
9232 fold_builtin_significand (tree arg, tree rettype)
9234 if (! validate_arg (arg, REAL_TYPE))
9235 return NULL_TREE;
9237 STRIP_NOPS (arg);
9239 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9241 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9243 switch (value->cl)
9245 case rvc_zero:
9246 case rvc_nan:
9247 case rvc_inf:
9248 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9249 return fold_convert (rettype, arg);
9250 case rvc_normal:
9251 /* For normal numbers, proceed iff radix == 2. */
9252 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9254 REAL_VALUE_TYPE result = *value;
9255 /* In GCC, normalized significands are in the range [0.5,
9256 1.0). We want them to be [1.0, 2.0) so set the
9257 exponent to 1. */
9258 SET_REAL_EXP (&result, 1);
9259 return build_real (rettype, result);
9261 break;
9265 return NULL_TREE;
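/* Worked example for the significand fold above (radix 2 only):
   12.0 is stored as 0.75 * 2**4, so forcing the exponent to 1 gives
   0.75 * 2**1, i.e.

       significand (12.0)  ==>  1.5

   which matches 12.0 / 2**ilogb (12.0).  */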
9268 /* Fold a call to builtin frexp, we can assume the base is 2. */
9270 static tree
9271 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9273 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9274 return NULL_TREE;
9276 STRIP_NOPS (arg0);
9278 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9279 return NULL_TREE;
9281 arg1 = build_fold_indirect_ref (arg1);
9283 /* Proceed if a valid pointer type was passed in. */
9284 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9286 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9287 tree frac, exp;
9289 switch (value->cl)
9291 case rvc_zero:
9292 /* For +-0, return (*exp = 0, +-0). */
9293 exp = integer_zero_node;
9294 frac = arg0;
9295 break;
9296 case rvc_nan:
9297 case rvc_inf:
9298 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9299 return omit_one_operand (rettype, arg0, arg1);
9300 case rvc_normal:
9302 /* Since the frexp function always expects base 2, and in
9303 GCC normalized significands are already in the range
9304 [0.5, 1.0), we have exactly what frexp wants. */
9305 REAL_VALUE_TYPE frac_rvt = *value;
9306 SET_REAL_EXP (&frac_rvt, 0);
9307 frac = build_real (rettype, frac_rvt);
9308 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9310 break;
9311 default:
9312 gcc_unreachable ();
9315 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9316 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9317 TREE_SIDE_EFFECTS (arg1) = 1;
9318 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9321 return NULL_TREE;
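/* For illustration, with a constant first argument the frexp fold above
   turns

       d = frexp (8.0, &e);

   into roughly d = (e = 4, 0.5): 8.0 is 0.5 * 2**4 internally, so the
   significand 0.5 becomes the value of the COMPOUND_EXPR and the
   exponent 4 is stored through the pointer on its left-hand side.  */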
9324 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9325 then we can assume the base is two. If it's false, then we have to
9326 check the mode of the TYPE parameter in certain cases. */
9328 static tree
9329 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9331 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9333 STRIP_NOPS (arg0);
9334 STRIP_NOPS (arg1);
9336 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9337 if (real_zerop (arg0) || integer_zerop (arg1)
9338 || (TREE_CODE (arg0) == REAL_CST
9339 && (real_isnan (&TREE_REAL_CST (arg0))
9340 || real_isinf (&TREE_REAL_CST (arg0)))))
9341 return omit_one_operand (type, arg0, arg1);
9343 /* If both arguments are constant, then try to evaluate it. */
9344 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9345 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9346 && host_integerp (arg1, 0))
9348 /* Bound the maximum adjustment to twice the range of the
9349 mode's valid exponents. Use abs to ensure the range is
9350 positive as a sanity check. */
9351 const long max_exp_adj = 2 *
9352 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9353 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9355 /* Get the user-requested adjustment. */
9356 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9358 /* The requested adjustment must be inside this range. This
9359 is a preliminary cap to avoid things like overflow, we
9360 may still fail to compute the result for other reasons. */
9361 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9363 REAL_VALUE_TYPE initial_result;
9365 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9367 /* Ensure we didn't overflow. */
9368 if (! real_isinf (&initial_result))
9370 const REAL_VALUE_TYPE trunc_result
9371 = real_value_truncate (TYPE_MODE (type), initial_result);
9373 /* Only proceed if the target mode can hold the
9374 resulting value. */
9375 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9376 return build_real (type, trunc_result);
9382 return NULL_TREE;
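/* Rough examples of the ldexp/scalbn folding above:

       ldexp (x, 0)    ==>  x       exponent of zero
       ldexp (0.0, n)  ==>  0.0     zero, Inf and NaN pass through unchanged
       ldexp (3.0, 2)  ==>  12.0    both arguments constant

   For scalbn/scalbln the constant case is only evaluated when the
   type's radix is 2, as checked above.  */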
9385 /* Fold a call to builtin modf. */
9387 static tree
9388 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9390 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9391 return NULL_TREE;
9393 STRIP_NOPS (arg0);
9395 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9396 return NULL_TREE;
9398 arg1 = build_fold_indirect_ref (arg1);
9400 /* Proceed if a valid pointer type was passed in. */
9401 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9403 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9404 REAL_VALUE_TYPE trunc, frac;
9406 switch (value->cl)
9408 case rvc_nan:
9409 case rvc_zero:
9410 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9411 trunc = frac = *value;
9412 break;
9413 case rvc_inf:
9414 /* For +-Inf, return (*arg1 = arg0, +-0). */
9415 frac = dconst0;
9416 frac.sign = value->sign;
9417 trunc = *value;
9418 break;
9419 case rvc_normal:
9420 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9421 real_trunc (&trunc, VOIDmode, value);
9422 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9423 /* If the original number was negative and already
9424 integral, then the fractional part is -0.0. */
9425 if (value->sign && frac.cl == rvc_zero)
9426 frac.sign = value->sign;
9427 break;
9430 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9431 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9432 build_real (rettype, trunc));
9433 TREE_SIDE_EFFECTS (arg1) = 1;
9434 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9435 build_real (rettype, frac));
9438 return NULL_TREE;
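/* Worked examples for the modf fold above; each result is a
   COMPOUND_EXPR that stores the integral part through the pointer and
   yields the fractional part as its value:

       modf (2.5, &i)       ==>  (i = 2.0, 0.5)
       modf (-3.0, &i)      ==>  (i = -3.0, -0.0)   negative integral value
       modf (INFINITY, &i)  ==>  (i = INFINITY, 0.0)  */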
9441 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9442 ARG is the argument for the call and BUILTIN_INDEX says which one. */
9444 static tree
9445 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9447 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9448 REAL_VALUE_TYPE r;
9450 if (!validate_arg (arg, REAL_TYPE))
9452 error ("non-floating-point argument to function %qs",
9453 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9454 return error_mark_node;
9457 switch (builtin_index)
9459 case BUILT_IN_ISINF:
9460 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9461 return omit_one_operand (type, integer_zero_node, arg);
9463 if (TREE_CODE (arg) == REAL_CST)
9465 r = TREE_REAL_CST (arg);
9466 if (real_isinf (&r))
9467 return real_compare (GT_EXPR, &r, &dconst0)
9468 ? integer_one_node : integer_minus_one_node;
9469 else
9470 return integer_zero_node;
9473 return NULL_TREE;
9475 case BUILT_IN_FINITE:
9476 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9477 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9478 return omit_one_operand (type, integer_one_node, arg);
9480 if (TREE_CODE (arg) == REAL_CST)
9482 r = TREE_REAL_CST (arg);
9483 return real_isinf (&r) || real_isnan (&r)
9484 ? integer_zero_node : integer_one_node;
9487 return NULL_TREE;
9489 case BUILT_IN_ISNAN:
9490 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9491 return omit_one_operand (type, integer_zero_node, arg);
9493 if (TREE_CODE (arg) == REAL_CST)
9495 r = TREE_REAL_CST (arg);
9496 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9499 arg = builtin_save_expr (arg);
9500 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9502 default:
9503 gcc_unreachable ();
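/* Examples of the classification folds above:

       __builtin_isnan (1.0)  ==>  0    constant argument
       __builtin_isinf (x)    ==>  0    when the mode has no infinities
       __builtin_isnan (x)    ==>  an UNORDERED_EXPR comparing the saved
                                   argument with itself, which is true
                                   exactly for NaNs.  */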
9507 /* Fold a call to an unordered comparison function such as
9508 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9509 being called and ARG0 and ARG1 are the arguments for the call.
9510 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9511 the opposite of the desired result. UNORDERED_CODE is used
9512 for modes that can hold NaNs and ORDERED_CODE is used for
9513 the rest. */
9515 static tree
9516 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9517 enum tree_code unordered_code,
9518 enum tree_code ordered_code)
9520 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9521 enum tree_code code;
9522 tree type0, type1;
9523 enum tree_code code0, code1;
9524 tree cmp_type = NULL_TREE;
9526 type0 = TREE_TYPE (arg0);
9527 type1 = TREE_TYPE (arg1);
9529 code0 = TREE_CODE (type0);
9530 code1 = TREE_CODE (type1);
9532 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9533 /* Choose the wider of two real types. */
9534 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9535 ? type0 : type1;
9536 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9537 cmp_type = type0;
9538 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9539 cmp_type = type1;
9540 else
9542 error ("non-floating-point argument to function %qs",
9543 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9544 return error_mark_node;
9547 arg0 = fold_convert (cmp_type, arg0);
9548 arg1 = fold_convert (cmp_type, arg1);
9550 if (unordered_code == UNORDERED_EXPR)
9552 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9553 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9554 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9557 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9558 : ordered_code;
9559 return fold_build1 (TRUTH_NOT_EXPR, type,
9560 fold_build2 (code, type, arg0, arg1));
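/* For illustration, __builtin_isgreater (x, y) folds to the negation of
   an unordered-or-less-equal comparison, roughly !(x UNLE y), when NaNs
   are honored, and to !(x <= y) otherwise.  Using the UN* codes means
   no floating-point exception is raised for NaN operands, as C99
   requires for these comparison macros.  */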
9563 /* Fold a call to built-in function FNDECL with 0 arguments.
9564 IGNORE is true if the result of the function call is ignored. This
9565 function returns NULL_TREE if no simplification was possible. */
9567 static tree
9568 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9570 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9571 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9572 switch (fcode)
9574 CASE_FLT_FN (BUILT_IN_INF):
9575 case BUILT_IN_INFD32:
9576 case BUILT_IN_INFD64:
9577 case BUILT_IN_INFD128:
9578 return fold_builtin_inf (type, true);
9580 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9581 return fold_builtin_inf (type, false);
9583 case BUILT_IN_CLASSIFY_TYPE:
9584 return fold_builtin_classify_type (NULL_TREE);
9586 default:
9587 break;
9589 return NULL_TREE;
9592 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9593 IGNORE is true if the result of the function call is ignored. This
9594 function returns NULL_TREE if no simplification was possible. */
9596 static tree
9597 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9599 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9600 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9601 switch (fcode)
9604 case BUILT_IN_CONSTANT_P:
9606 tree val = fold_builtin_constant_p (arg0);
9608 /* Gimplification will pull the CALL_EXPR for the builtin out of
9609 an if condition. When not optimizing, we'll not CSE it back.
9610 To avoid regressions such as link errors, return false now. */
9611 if (!val && !optimize)
9612 val = integer_zero_node;
9614 return val;
9617 case BUILT_IN_CLASSIFY_TYPE:
9618 return fold_builtin_classify_type (arg0);
9620 case BUILT_IN_STRLEN:
9621 return fold_builtin_strlen (arg0);
9623 CASE_FLT_FN (BUILT_IN_FABS):
9624 return fold_builtin_fabs (arg0, type);
9626 case BUILT_IN_ABS:
9627 case BUILT_IN_LABS:
9628 case BUILT_IN_LLABS:
9629 case BUILT_IN_IMAXABS:
9630 return fold_builtin_abs (arg0, type);
9632 CASE_FLT_FN (BUILT_IN_CONJ):
9633 if (validate_arg (arg0, COMPLEX_TYPE))
9634 return fold_build1 (CONJ_EXPR, type, arg0);
9635 break;
9637 CASE_FLT_FN (BUILT_IN_CREAL):
9638 if (validate_arg (arg0, COMPLEX_TYPE))
9639 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
9640 break;
9642 CASE_FLT_FN (BUILT_IN_CIMAG):
9643 if (validate_arg (arg0, COMPLEX_TYPE))
9644 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9645 break;
9647 CASE_FLT_FN (BUILT_IN_CCOS):
9648 CASE_FLT_FN (BUILT_IN_CCOSH):
9649 /* These functions are "even", i.e. f(x) == f(-x). */
9650 if (validate_arg (arg0, COMPLEX_TYPE))
9652 tree narg = fold_strip_sign_ops (arg0);
9653 if (narg)
9654 return build_call_expr (fndecl, 1, narg);
9656 break;
9658 CASE_FLT_FN (BUILT_IN_CABS):
9659 return fold_builtin_cabs (arg0, type, fndecl);
9661 CASE_FLT_FN (BUILT_IN_CARG):
9662 return fold_builtin_carg (arg0, type);
9664 CASE_FLT_FN (BUILT_IN_SQRT):
9665 return fold_builtin_sqrt (arg0, type);
9667 CASE_FLT_FN (BUILT_IN_CBRT):
9668 return fold_builtin_cbrt (arg0, type);
9670 CASE_FLT_FN (BUILT_IN_ASIN):
9671 if (validate_arg (arg0, REAL_TYPE))
9672 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9673 &dconstm1, &dconst1, true);
9674 break;
9676 CASE_FLT_FN (BUILT_IN_ACOS):
9677 if (validate_arg (arg0, REAL_TYPE))
9678 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9679 &dconstm1, &dconst1, true);
9680 break;
9682 CASE_FLT_FN (BUILT_IN_ATAN):
9683 if (validate_arg (arg0, REAL_TYPE))
9684 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9685 break;
9687 CASE_FLT_FN (BUILT_IN_ASINH):
9688 if (validate_arg (arg0, REAL_TYPE))
9689 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9690 break;
9692 CASE_FLT_FN (BUILT_IN_ACOSH):
9693 if (validate_arg (arg0, REAL_TYPE))
9694 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9695 &dconst1, NULL, true);
9696 break;
9698 CASE_FLT_FN (BUILT_IN_ATANH):
9699 if (validate_arg (arg0, REAL_TYPE))
9700 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9701 &dconstm1, &dconst1, false);
9702 break;
9704 CASE_FLT_FN (BUILT_IN_SIN):
9705 if (validate_arg (arg0, REAL_TYPE))
9706 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9707 break;
9709 CASE_FLT_FN (BUILT_IN_COS):
9710 return fold_builtin_cos (arg0, type, fndecl);
9713 CASE_FLT_FN (BUILT_IN_TAN):
9714 return fold_builtin_tan (arg0, type);
9716 CASE_FLT_FN (BUILT_IN_CEXP):
9717 return fold_builtin_cexp (arg0, type);
9719 CASE_FLT_FN (BUILT_IN_CEXPI):
9720 if (validate_arg (arg0, REAL_TYPE))
9721 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9722 break;
9724 CASE_FLT_FN (BUILT_IN_SINH):
9725 if (validate_arg (arg0, REAL_TYPE))
9726 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9727 break;
9729 CASE_FLT_FN (BUILT_IN_COSH):
9730 return fold_builtin_cosh (arg0, type, fndecl);
9732 CASE_FLT_FN (BUILT_IN_TANH):
9733 if (validate_arg (arg0, REAL_TYPE))
9734 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9735 break;
9737 CASE_FLT_FN (BUILT_IN_ERF):
9738 if (validate_arg (arg0, REAL_TYPE))
9739 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9740 break;
9742 CASE_FLT_FN (BUILT_IN_ERFC):
9743 if (validate_arg (arg0, REAL_TYPE))
9744 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9745 break;
9747 CASE_FLT_FN (BUILT_IN_TGAMMA):
9748 if (validate_arg (arg0, REAL_TYPE))
9749 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9750 break;
9752 CASE_FLT_FN (BUILT_IN_EXP):
9753 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9755 CASE_FLT_FN (BUILT_IN_EXP2):
9756 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9758 CASE_FLT_FN (BUILT_IN_EXP10):
9759 CASE_FLT_FN (BUILT_IN_POW10):
9760 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9762 CASE_FLT_FN (BUILT_IN_EXPM1):
9763 if (validate_arg (arg0, REAL_TYPE))
9764 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9765 break;
9767 CASE_FLT_FN (BUILT_IN_LOG):
9768 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9770 CASE_FLT_FN (BUILT_IN_LOG2):
9771 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9773 CASE_FLT_FN (BUILT_IN_LOG10):
9774 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9776 CASE_FLT_FN (BUILT_IN_LOG1P):
9777 if (validate_arg (arg0, REAL_TYPE))
9778 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9779 &dconstm1, NULL, false);
9780 break;
9782 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9783 CASE_FLT_FN (BUILT_IN_J0):
9784 if (validate_arg (arg0, REAL_TYPE))
9785 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9786 NULL, NULL, 0);
9787 break;
9789 CASE_FLT_FN (BUILT_IN_J1):
9790 if (validate_arg (arg0, REAL_TYPE))
9791 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9792 NULL, NULL, 0);
9793 break;
9795 CASE_FLT_FN (BUILT_IN_Y0):
9796 if (validate_arg (arg0, REAL_TYPE))
9797 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9798 &dconst0, NULL, false);
9799 break;
9801 CASE_FLT_FN (BUILT_IN_Y1):
9802 if (validate_arg (arg0, REAL_TYPE))
9803 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9804 &dconst0, NULL, false);
9805 break;
9806 #endif
9808 CASE_FLT_FN (BUILT_IN_NAN):
9809 case BUILT_IN_NAND32:
9810 case BUILT_IN_NAND64:
9811 case BUILT_IN_NAND128:
9812 return fold_builtin_nan (arg0, type, true);
9814 CASE_FLT_FN (BUILT_IN_NANS):
9815 return fold_builtin_nan (arg0, type, false);
9817 CASE_FLT_FN (BUILT_IN_FLOOR):
9818 return fold_builtin_floor (fndecl, arg0);
9820 CASE_FLT_FN (BUILT_IN_CEIL):
9821 return fold_builtin_ceil (fndecl, arg0);
9823 CASE_FLT_FN (BUILT_IN_TRUNC):
9824 return fold_builtin_trunc (fndecl, arg0);
9826 CASE_FLT_FN (BUILT_IN_ROUND):
9827 return fold_builtin_round (fndecl, arg0);
9829 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9830 CASE_FLT_FN (BUILT_IN_RINT):
9831 return fold_trunc_transparent_mathfn (fndecl, arg0);
9833 CASE_FLT_FN (BUILT_IN_LCEIL):
9834 CASE_FLT_FN (BUILT_IN_LLCEIL):
9835 CASE_FLT_FN (BUILT_IN_LFLOOR):
9836 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9837 CASE_FLT_FN (BUILT_IN_LROUND):
9838 CASE_FLT_FN (BUILT_IN_LLROUND):
9839 return fold_builtin_int_roundingfn (fndecl, arg0);
9841 CASE_FLT_FN (BUILT_IN_LRINT):
9842 CASE_FLT_FN (BUILT_IN_LLRINT):
9843 return fold_fixed_mathfn (fndecl, arg0);
9845 case BUILT_IN_BSWAP32:
9846 case BUILT_IN_BSWAP64:
9847 return fold_builtin_bswap (fndecl, arg0);
9849 CASE_INT_FN (BUILT_IN_FFS):
9850 CASE_INT_FN (BUILT_IN_CLZ):
9851 CASE_INT_FN (BUILT_IN_CTZ):
9852 CASE_INT_FN (BUILT_IN_POPCOUNT):
9853 CASE_INT_FN (BUILT_IN_PARITY):
9854 return fold_builtin_bitop (fndecl, arg0);
9856 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9857 return fold_builtin_signbit (arg0, type);
9859 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9860 return fold_builtin_significand (arg0, type);
9862 CASE_FLT_FN (BUILT_IN_ILOGB):
9863 CASE_FLT_FN (BUILT_IN_LOGB):
9864 return fold_builtin_logb (arg0, type);
9866 case BUILT_IN_ISASCII:
9867 return fold_builtin_isascii (arg0);
9869 case BUILT_IN_TOASCII:
9870 return fold_builtin_toascii (arg0);
9872 case BUILT_IN_ISDIGIT:
9873 return fold_builtin_isdigit (arg0);
9875 CASE_FLT_FN (BUILT_IN_FINITE):
9876 case BUILT_IN_FINITED32:
9877 case BUILT_IN_FINITED64:
9878 case BUILT_IN_FINITED128:
9879 return fold_builtin_classify (fndecl, arg0, BUILT_IN_FINITE);
9881 CASE_FLT_FN (BUILT_IN_ISINF):
9882 case BUILT_IN_ISINFD32:
9883 case BUILT_IN_ISINFD64:
9884 case BUILT_IN_ISINFD128:
9885 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
9887 CASE_FLT_FN (BUILT_IN_ISNAN):
9888 case BUILT_IN_ISNAND32:
9889 case BUILT_IN_ISNAND64:
9890 case BUILT_IN_ISNAND128:
9891 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
9893 case BUILT_IN_PRINTF:
9894 case BUILT_IN_PRINTF_UNLOCKED:
9895 case BUILT_IN_VPRINTF:
9896 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
9898 default:
9899 break;
9902 return NULL_TREE;
9906 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9907 IGNORE is true if the result of the function call is ignored. This
9908 function returns NULL_TREE if no simplification was possible. */
9910 static tree
9911 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
9913 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9914 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9916 switch (fcode)
9918 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9919 CASE_FLT_FN (BUILT_IN_JN):
9920 if (validate_arg (arg0, INTEGER_TYPE)
9921 && validate_arg (arg1, REAL_TYPE))
9922 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9923 break;
9925 CASE_FLT_FN (BUILT_IN_YN):
9926 if (validate_arg (arg0, INTEGER_TYPE)
9927 && validate_arg (arg1, REAL_TYPE))
9928 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9929 &dconst0, false);
9930 break;
9932 CASE_FLT_FN (BUILT_IN_DREM):
9933 CASE_FLT_FN (BUILT_IN_REMAINDER):
9934 if (validate_arg (arg0, REAL_TYPE)
9935 && validate_arg (arg1, REAL_TYPE))
9936 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9937 break;
9939 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9940 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9941 if (validate_arg (arg0, REAL_TYPE)
9942 && validate_arg (arg1, POINTER_TYPE))
9943 return do_mpfr_lgamma_r (arg0, arg1, type);
9944 break;
9945 #endif
9947 CASE_FLT_FN (BUILT_IN_ATAN2):
9948 if (validate_arg (arg0, REAL_TYPE)
9949 && validate_arg (arg1, REAL_TYPE))
9950 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9951 break;
9953 CASE_FLT_FN (BUILT_IN_FDIM):
9954 if (validate_arg (arg0, REAL_TYPE)
9956 && validate_arg (arg1, REAL_TYPE))
9956 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9957 break;
9959 CASE_FLT_FN (BUILT_IN_HYPOT):
9960 return fold_builtin_hypot (fndecl, arg0, arg1, type);
9962 CASE_FLT_FN (BUILT_IN_LDEXP):
9963 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
9964 CASE_FLT_FN (BUILT_IN_SCALBN):
9965 CASE_FLT_FN (BUILT_IN_SCALBLN):
9966 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
9968 CASE_FLT_FN (BUILT_IN_FREXP):
9969 return fold_builtin_frexp (arg0, arg1, type);
9971 CASE_FLT_FN (BUILT_IN_MODF):
9972 return fold_builtin_modf (arg0, arg1, type);
9974 case BUILT_IN_BZERO:
9975 return fold_builtin_bzero (arg0, arg1, ignore);
9977 case BUILT_IN_FPUTS:
9978 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
9980 case BUILT_IN_FPUTS_UNLOCKED:
9981 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
9983 case BUILT_IN_STRSTR:
9984 return fold_builtin_strstr (arg0, arg1, type);
9986 case BUILT_IN_STRCAT:
9987 return fold_builtin_strcat (arg0, arg1);
9989 case BUILT_IN_STRSPN:
9990 return fold_builtin_strspn (arg0, arg1);
9992 case BUILT_IN_STRCSPN:
9993 return fold_builtin_strcspn (arg0, arg1);
9995 case BUILT_IN_STRCHR:
9996 case BUILT_IN_INDEX:
9997 return fold_builtin_strchr (arg0, arg1, type);
9999 case BUILT_IN_STRRCHR:
10000 case BUILT_IN_RINDEX:
10001 return fold_builtin_strrchr (arg0, arg1, type);
10003 case BUILT_IN_STRCPY:
10004 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10006 case BUILT_IN_STRCMP:
10007 return fold_builtin_strcmp (arg0, arg1);
10009 case BUILT_IN_STRPBRK:
10010 return fold_builtin_strpbrk (arg0, arg1, type);
10012 case BUILT_IN_EXPECT:
10013 return fold_builtin_expect (arg0);
10015 CASE_FLT_FN (BUILT_IN_POW):
10016 return fold_builtin_pow (fndecl, arg0, arg1, type);
10018 CASE_FLT_FN (BUILT_IN_POWI):
10019 return fold_builtin_powi (fndecl, arg0, arg1, type);
10021 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10022 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10024 CASE_FLT_FN (BUILT_IN_FMIN):
10025 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10027 CASE_FLT_FN (BUILT_IN_FMAX):
10028 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10030 case BUILT_IN_ISGREATER:
10031 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10032 case BUILT_IN_ISGREATEREQUAL:
10033 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10034 case BUILT_IN_ISLESS:
10035 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10036 case BUILT_IN_ISLESSEQUAL:
10037 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10038 case BUILT_IN_ISLESSGREATER:
10039 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10040 case BUILT_IN_ISUNORDERED:
10041 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10042 NOP_EXPR);
10044 /* We do the folding for va_start in the expander. */
10045 case BUILT_IN_VA_START:
10046 break;
10048 case BUILT_IN_SPRINTF:
10049 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10051 case BUILT_IN_OBJECT_SIZE:
10052 return fold_builtin_object_size (arg0, arg1);
10054 case BUILT_IN_PRINTF:
10055 case BUILT_IN_PRINTF_UNLOCKED:
10056 case BUILT_IN_VPRINTF:
10057 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10059 case BUILT_IN_PRINTF_CHK:
10060 case BUILT_IN_VPRINTF_CHK:
10061 if (!validate_arg (arg0, INTEGER_TYPE)
10062 || TREE_SIDE_EFFECTS (arg0))
10063 return NULL_TREE;
10064 else
10065 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10066 break;
10068 case BUILT_IN_FPRINTF:
10069 case BUILT_IN_FPRINTF_UNLOCKED:
10070 case BUILT_IN_VFPRINTF:
10071 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10072 ignore, fcode);
10074 default:
10075 break;
10077 return NULL_TREE;
10080 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10081 and ARG2. IGNORE is true if the result of the function call is ignored.
10082 This function returns NULL_TREE if no simplification was possible. */
10084 static tree
10085 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10087 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10088 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10089 switch (fcode)
10092 CASE_FLT_FN (BUILT_IN_SINCOS):
10093 return fold_builtin_sincos (arg0, arg1, arg2);
10095 CASE_FLT_FN (BUILT_IN_FMA):
10096 if (validate_arg (arg0, REAL_TYPE)
10097 && validate_arg (arg1, REAL_TYPE)
10098 && validate_arg (arg2, REAL_TYPE))
10099 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10100 break;
10102 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10103 CASE_FLT_FN (BUILT_IN_REMQUO):
10104 if (validate_arg (arg0, REAL_TYPE)
10105 && validate_arg (arg1, REAL_TYPE)
10106 && validate_arg (arg2, POINTER_TYPE))
10107 return do_mpfr_remquo (arg0, arg1, arg2);
10108 break;
10109 #endif
10111 case BUILT_IN_MEMSET:
10112 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10114 case BUILT_IN_BCOPY:
10115 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10117 case BUILT_IN_MEMCPY:
10118 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10120 case BUILT_IN_MEMPCPY:
10121 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10123 case BUILT_IN_MEMMOVE:
10124 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10126 case BUILT_IN_STRNCAT:
10127 return fold_builtin_strncat (arg0, arg1, arg2);
10129 case BUILT_IN_STRNCPY:
10130 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10132 case BUILT_IN_STRNCMP:
10133 return fold_builtin_strncmp (arg0, arg1, arg2);
10135 case BUILT_IN_MEMCHR:
10136 return fold_builtin_memchr (arg0, arg1, arg2, type);
10138 case BUILT_IN_BCMP:
10139 case BUILT_IN_MEMCMP:
10140 return fold_builtin_memcmp (arg0, arg1, arg2);
10142 case BUILT_IN_SPRINTF:
10143 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10145 case BUILT_IN_STRCPY_CHK:
10146 case BUILT_IN_STPCPY_CHK:
10147 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10148 ignore, fcode);
10150 case BUILT_IN_STRCAT_CHK:
10151 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10153 case BUILT_IN_PRINTF_CHK:
10154 case BUILT_IN_VPRINTF_CHK:
10155 if (!validate_arg (arg0, INTEGER_TYPE)
10156 || TREE_SIDE_EFFECTS (arg0))
10157 return NULL_TREE;
10158 else
10159 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10160 break;
10162 case BUILT_IN_FPRINTF:
10163 case BUILT_IN_FPRINTF_UNLOCKED:
10164 case BUILT_IN_VFPRINTF:
10165 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10167 case BUILT_IN_FPRINTF_CHK:
10168 case BUILT_IN_VFPRINTF_CHK:
10169 if (!validate_arg (arg1, INTEGER_TYPE)
10170 || TREE_SIDE_EFFECTS (arg1))
10171 return NULL_TREE;
10172 else
10173 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10174 ignore, fcode);
10176 default:
10177 break;
10179 return NULL_TREE;
10182 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10183 ARG2, and ARG3. IGNORE is true if the result of the function call is
10184 ignored. This function returns NULL_TREE if no simplification was
10185 possible. */
10187 static tree
10188 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10189 bool ignore)
10191 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10193 switch (fcode)
10195 case BUILT_IN_MEMCPY_CHK:
10196 case BUILT_IN_MEMPCPY_CHK:
10197 case BUILT_IN_MEMMOVE_CHK:
10198 case BUILT_IN_MEMSET_CHK:
10199 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10200 NULL_TREE, ignore,
10201 DECL_FUNCTION_CODE (fndecl));
10203 case BUILT_IN_STRNCPY_CHK:
10204 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10206 case BUILT_IN_STRNCAT_CHK:
10207 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10209 case BUILT_IN_FPRINTF_CHK:
10210 case BUILT_IN_VFPRINTF_CHK:
10211 if (!validate_arg (arg1, INTEGER_TYPE)
10212 || TREE_SIDE_EFFECTS (arg1))
10213 return NULL_TREE;
10214 else
10215 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10216 ignore, fcode);
10217 break;
10219 default:
10220 break;
10222 return NULL_TREE;
10225 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10226 arguments, where NARGS <= 4. IGNORE is true if the result of the
10227 function call is ignored. This function returns NULL_TREE if no
10228 simplification was possible. Note that this only folds builtins with
10229 fixed argument patterns. Foldings that do varargs-to-varargs
10230 transformations, or that match calls with more than 4 arguments,
10231 need to be handled with fold_builtin_varargs instead. */
10233 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10235 static tree
10236 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10238 tree ret = NULL_TREE;
10239 switch (nargs)
10241 case 0:
10242 ret = fold_builtin_0 (fndecl, ignore);
10243 break;
10244 case 1:
10245 ret = fold_builtin_1 (fndecl, args[0], ignore);
10246 break;
10247 case 2:
10248 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10249 break;
10250 case 3:
10251 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10252 break;
10253 case 4:
10254 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10255 ignore);
10256 break;
10257 default:
10258 break;
10260 if (ret)
10262 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10263 TREE_NO_WARNING (ret) = 1;
10264 return ret;
10266 return NULL_TREE;
10269 /* Builtins with folding operations that operate on "..." arguments
10270 need special handling; we need to store the arguments in a convenient
10271 data structure before attempting any folding. Fortunately there are
10272 only a few builtins that fall into this category. FNDECL is the
10273 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10274 result of the function call is ignored. */
10276 static tree
10277 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10279 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10280 tree ret = NULL_TREE;
10282 switch (fcode)
10284 case BUILT_IN_SPRINTF_CHK:
10285 case BUILT_IN_VSPRINTF_CHK:
10286 ret = fold_builtin_sprintf_chk (exp, fcode);
10287 break;
10289 case BUILT_IN_SNPRINTF_CHK:
10290 case BUILT_IN_VSNPRINTF_CHK:
10291 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10293 default:
10294 break;
10296 if (ret)
10298 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10299 TREE_NO_WARNING (ret) = 1;
10300 return ret;
10302 return NULL_TREE;
10305 /* A wrapper function for builtin folding that prevents warnings for
10306 "statement without effect" and the like, caused by removing the
10307 call node earlier than the warning is generated. */
10309 tree
10310 fold_call_expr (tree exp, bool ignore)
10312 tree ret = NULL_TREE;
10313 tree fndecl = get_callee_fndecl (exp);
10314 if (fndecl
10315 && TREE_CODE (fndecl) == FUNCTION_DECL
10316 && DECL_BUILT_IN (fndecl))
10318 /* FIXME: Don't use a list in this interface. */
10319 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10320 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10321 else
10323 int nargs = call_expr_nargs (exp);
10324 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10326 tree *args = CALL_EXPR_ARGP (exp);
10327 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10329 if (!ret)
10330 ret = fold_builtin_varargs (fndecl, exp, ignore);
10331 if (ret)
10333 /* Propagate location information from original call to
10334 expansion of builtin. Otherwise things like
10335 maybe_emit_chk_warning, that operate on the expansion
10336 of a builtin, will use the wrong location information. */
10337 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10339 tree realret = ret;
10340 if (TREE_CODE (ret) == NOP_EXPR)
10341 realret = TREE_OPERAND (ret, 0);
10342 if (CAN_HAVE_LOCATION_P (realret)
10343 && !EXPR_HAS_LOCATION (realret))
10344 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10346 return ret;
10350 return NULL_TREE;
10353 /* Conveniently construct a function call expression. FNDECL names the
10354 function to be called and ARGLIST is a TREE_LIST of arguments. */
10356 tree
10357 build_function_call_expr (tree fndecl, tree arglist)
10359 tree fntype = TREE_TYPE (fndecl);
10360 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10361 int n = list_length (arglist);
10362 tree *argarray = (tree *) alloca (n * sizeof (tree));
10363 int i;
10365 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10366 argarray[i] = TREE_VALUE (arglist);
10367 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10370 /* Conveniently construct a function call expression. FNDECL names the
10371 function to be called, N is the number of arguments, and the "..."
10372 parameters are the argument expressions. */
10374 tree
10375 build_call_expr (tree fndecl, int n, ...)
10377 va_list ap;
10378 tree fntype = TREE_TYPE (fndecl);
10379 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10380 tree *argarray = (tree *) alloca (n * sizeof (tree));
10381 int i;
10383 va_start (ap, n);
10384 for (i = 0; i < n; i++)
10385 argarray[i] = va_arg (ap, tree);
10386 va_end (ap);
10387 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
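/* A typical use of build_call_expr appears further down in this file,
   where strstr is folded into strchr:

       build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));

   The "..." arguments are collected into an array and handed to
   fold_builtin_call_array, so the resulting CALL_EXPR is itself folded
   when possible.  */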
10390 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10391 N arguments are passed in the array ARGARRAY. */
10393 tree
10394 fold_builtin_call_array (tree type,
10395 tree fn,
10396 int n,
10397 tree *argarray)
10399 tree ret = NULL_TREE;
10400 int i;
10401 tree exp;
10403 if (TREE_CODE (fn) == ADDR_EXPR)
10405 tree fndecl = TREE_OPERAND (fn, 0);
10406 if (TREE_CODE (fndecl) == FUNCTION_DECL
10407 && DECL_BUILT_IN (fndecl))
10409 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10411 tree arglist = NULL_TREE;
10412 for (i = n - 1; i >= 0; i--)
10413 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10414 ret = targetm.fold_builtin (fndecl, arglist, false);
10415 if (ret)
10416 return ret;
10418 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10420 /* First try the transformations that don't require consing up
10421 an exp. */
10422 ret = fold_builtin_n (fndecl, argarray, n, false);
10423 if (ret)
10424 return ret;
10427 /* If we got this far, we need to build an exp. */
10428 exp = build_call_array (type, fn, n, argarray);
10429 ret = fold_builtin_varargs (fndecl, exp, false);
10430 return ret ? ret : exp;
10434 return build_call_array (type, fn, n, argarray);
10437 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10438 along with N new arguments specified as the "..." parameters. SKIP
10439 is the number of arguments in EXP to be omitted. This function is used
10440 to do varargs-to-varargs transformations. */
10442 static tree
10443 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10445 int oldnargs = call_expr_nargs (exp);
10446 int nargs = oldnargs - skip + n;
10447 tree fntype = TREE_TYPE (fndecl);
10448 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10449 tree *buffer;
10451 if (n > 0)
10453 int i, j;
10454 va_list ap;
10456 buffer = (tree *) alloca (nargs * sizeof (tree));
10457 va_start (ap, n);
10458 for (i = 0; i < n; i++)
10459 buffer[i] = va_arg (ap, tree);
10460 va_end (ap);
10461 for (j = skip; j < oldnargs; j++, i++)
10462 buffer[i] = CALL_EXPR_ARG (exp, j);
10464 else
10465 buffer = CALL_EXPR_ARGP (exp) + skip;
10467 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
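/* For illustration (with made-up arguments): a call such as

       rewrite_call_expr (exp, 2, fndecl, 1, newarg)

   builds a call to FNDECL whose arguments are NEWARG followed by every
   argument of EXP except the first two, and then folds the result.  */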
10470 /* Validate a single argument ARG against a tree code CODE representing
10471 a type. */
10473 static bool
10474 validate_arg (tree arg, enum tree_code code)
10476 if (!arg)
10477 return false;
10478 else if (code == POINTER_TYPE)
10479 return POINTER_TYPE_P (TREE_TYPE (arg));
10480 return code == TREE_CODE (TREE_TYPE (arg));
10483 /* This function validates the types of a function call argument list
10484 against a specified list of tree_codes. If the last specifier is a 0,
10485 that represents an ellipsis; otherwise the last specifier must be a
10486 VOID_TYPE. */
10488 bool
10489 validate_arglist (tree callexpr, ...)
10491 enum tree_code code;
10492 bool res = 0;
10493 va_list ap;
10494 call_expr_arg_iterator iter;
10495 tree arg;
10497 va_start (ap, callexpr);
10498 init_call_expr_arg_iterator (callexpr, &iter);
10502 code = va_arg (ap, enum tree_code);
10503 switch (code)
10505 case 0:
10506 /* This signifies an ellipsis; any further arguments are all ok. */
10507 res = true;
10508 goto end;
10509 case VOID_TYPE:
10510 /* This signifies an endlink; if no arguments remain, return
10511 true, otherwise return false. */
10512 res = !more_call_expr_args_p (&iter);
10513 goto end;
10514 default:
10515 /* If no parameters remain or the parameter's code does not
10516 match the specified code, return false. Otherwise continue
10517 checking any remaining arguments. */
10518 arg = next_call_expr_arg (&iter);
10519 if (!validate_arg (arg, code))
10520 goto end;
10521 break;
10524 while (1);
10526 /* We need gotos here since we can only have one VA_CLOSE in a
10527 function. */
10528 end: ;
10529 va_end (ap);
10531 return res;
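/* Usage sketch: a caller expecting exactly a (pointer, integer)
   argument list would write

       validate_arglist (callexpr, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   while ending the list with 0 instead of VOID_TYPE accepts any number
   of further arguments, per the ellipsis convention described above.  */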
10534 /* Default target-specific builtin expander that does nothing. */
10536 rtx
10537 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10538 rtx target ATTRIBUTE_UNUSED,
10539 rtx subtarget ATTRIBUTE_UNUSED,
10540 enum machine_mode mode ATTRIBUTE_UNUSED,
10541 int ignore ATTRIBUTE_UNUSED)
10543 return NULL_RTX;
10546 /* Returns true if EXP represents data that would potentially reside
10547 in a readonly section. */
10549 static bool
10550 readonly_data_expr (tree exp)
10552 STRIP_NOPS (exp);
10554 if (TREE_CODE (exp) != ADDR_EXPR)
10555 return false;
10557 exp = get_base_address (TREE_OPERAND (exp, 0));
10558 if (!exp)
10559 return false;
10561 /* Make sure we call decl_readonly_section only for trees it
10562 can handle (since it returns true for everything it doesn't
10563 understand). */
10564 if (TREE_CODE (exp) == STRING_CST
10565 || TREE_CODE (exp) == CONSTRUCTOR
10566 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10567 return decl_readonly_section (exp, 0);
10568 else
10569 return false;
10572 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10573 to the call, and TYPE is its return type.
10575 Return NULL_TREE if no simplification was possible, otherwise return the
10576 simplified form of the call as a tree.
10578 The simplified form may be a constant or other expression which
10579 computes the same value, but in a more efficient manner (including
10580 calls to other builtin functions).
10582 The call may contain arguments which need to be evaluated, but
10583 which are not useful to determine the result of the call. In
10584 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10585 COMPOUND_EXPR will be an argument which must be evaluated.
10586 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10587 COMPOUND_EXPR in the chain will contain the tree for the simplified
10588 form of the builtin function call. */
10590 static tree
10591 fold_builtin_strstr (tree s1, tree s2, tree type)
10593 if (!validate_arg (s1, POINTER_TYPE)
10594 || !validate_arg (s2, POINTER_TYPE))
10595 return NULL_TREE;
10596 else
10598 tree fn;
10599 const char *p1, *p2;
10601 p2 = c_getstr (s2);
10602 if (p2 == NULL)
10603 return NULL_TREE;
10605 p1 = c_getstr (s1);
10606 if (p1 != NULL)
10608 const char *r = strstr (p1, p2);
10609 tree tem;
10611 if (r == NULL)
10612 return build_int_cst (TREE_TYPE (s1), 0);
10614 /* Return an offset into the constant string argument. */
10615 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10616 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10617 return fold_convert (type, tem);
10620 /* The argument is const char *, and the result is char *, so we need
10621 a type conversion here to avoid a warning. */
10622 if (p2[0] == '\0')
10623 return fold_convert (type, s1);
10625 if (p2[1] != '\0')
10626 return NULL_TREE;
10628 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10629 if (!fn)
10630 return NULL_TREE;
10632 /* New argument list transforming strstr(s1, s2) to
10633 strchr(s1, s2[0]). */
10634 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
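/* Examples of the strstr simplifications above:

       strstr ("hello", "lo")  ==>  "hello" + 3     both strings constant
       strstr (s, "")          ==>  (char *) s
       strstr (s, "l")         ==>  strchr (s, 'l')

   Longer non-constant searches are left to the library routine.  */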
10638 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10639 the call, and TYPE is its return type.
10641 Return NULL_TREE if no simplification was possible, otherwise return the
10642 simplified form of the call as a tree.
10644 The simplified form may be a constant or other expression which
10645 computes the same value, but in a more efficient manner (including
10646 calls to other builtin functions).
10648 The call may contain arguments which need to be evaluated, but
10649 which are not useful to determine the result of the call. In
10650 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10651 COMPOUND_EXPR will be an argument which must be evaluated.
10652 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10653 COMPOUND_EXPR in the chain will contain the tree for the simplified
10654 form of the builtin function call. */
10656 static tree
10657 fold_builtin_strchr (tree s1, tree s2, tree type)
10659 if (!validate_arg (s1, POINTER_TYPE)
10660 || !validate_arg (s2, INTEGER_TYPE))
10661 return NULL_TREE;
10662 else
10664 const char *p1;
10666 if (TREE_CODE (s2) != INTEGER_CST)
10667 return NULL_TREE;
10669 p1 = c_getstr (s1);
10670 if (p1 != NULL)
10672 char c;
10673 const char *r;
10674 tree tem;
10676 if (target_char_cast (s2, &c))
10677 return NULL_TREE;
10679 r = strchr (p1, c);
10681 if (r == NULL)
10682 return build_int_cst (TREE_TYPE (s1), 0);
10684 /* Return an offset into the constant string argument. */
10685 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10686 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10687 return fold_convert (type, tem);
10689 return NULL_TREE;
10693 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10694 the call, and TYPE is its return type.
10696 Return NULL_TREE if no simplification was possible, otherwise return the
10697 simplified form of the call as a tree.
10699 The simplified form may be a constant or other expression which
10700 computes the same value, but in a more efficient manner (including
10701 calls to other builtin functions).
10703 The call may contain arguments which need to be evaluated, but
10704 which are not useful to determine the result of the call. In
10705 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10706 COMPOUND_EXPR will be an argument which must be evaluated.
10707 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10708 COMPOUND_EXPR in the chain will contain the tree for the simplified
10709 form of the builtin function call. */
10711 static tree
10712 fold_builtin_strrchr (tree s1, tree s2, tree type)
10714 if (!validate_arg (s1, POINTER_TYPE)
10715 || !validate_arg (s2, INTEGER_TYPE))
10716 return NULL_TREE;
10717 else
10719 tree fn;
10720 const char *p1;
10722 if (TREE_CODE (s2) != INTEGER_CST)
10723 return NULL_TREE;
10725 p1 = c_getstr (s1);
10726 if (p1 != NULL)
10728 char c;
10729 const char *r;
10730 tree tem;
10732 if (target_char_cast (s2, &c))
10733 return NULL_TREE;
10735 r = strrchr (p1, c);
10737 if (r == NULL)
10738 return build_int_cst (TREE_TYPE (s1), 0);
10740 /* Return an offset into the constant string argument. */
10741 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10742 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10743 return fold_convert (type, tem);
10746 if (! integer_zerop (s2))
10747 return NULL_TREE;
10749 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10750 if (!fn)
10751 return NULL_TREE;
10753 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10754 return build_call_expr (fn, 2, s1, s2);
10758 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10759 to the call, and TYPE is its return type.
10761 Return NULL_TREE if no simplification was possible, otherwise return the
10762 simplified form of the call as a tree.
10764 The simplified form may be a constant or other expression which
10765 computes the same value, but in a more efficient manner (including
10766 calls to other builtin functions).
10768 The call may contain arguments which need to be evaluated, but
10769 which are not useful to determine the result of the call. In
10770 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10771 COMPOUND_EXPR will be an argument which must be evaluated.
10772 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10773 COMPOUND_EXPR in the chain will contain the tree for the simplified
10774 form of the builtin function call. */
10776 static tree
10777 fold_builtin_strpbrk (tree s1, tree s2, tree type)
10779 if (!validate_arg (s1, POINTER_TYPE)
10780 || !validate_arg (s2, POINTER_TYPE))
10781 return NULL_TREE;
10782 else
10784 tree fn;
10785 const char *p1, *p2;
10787 p2 = c_getstr (s2);
10788 if (p2 == NULL)
10789 return NULL_TREE;
10791 p1 = c_getstr (s1);
10792 if (p1 != NULL)
10794 const char *r = strpbrk (p1, p2);
10795 tree tem;
10797 if (r == NULL)
10798 return build_int_cst (TREE_TYPE (s1), 0);
10800 /* Return an offset into the constant string argument. */
10801 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10802 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10803 return fold_convert (type, tem);
10806 if (p2[0] == '\0')
10807 /* strpbrk(x, "") == NULL.
10808 Evaluate and ignore s1 in case it had side-effects. */
10809 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
10811 if (p2[1] != '\0')
10812 return NULL_TREE; /* Really call strpbrk. */
10814 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10815 if (!fn)
10816 return NULL_TREE;
10818 /* New argument list transforming strpbrk(s1, s2) to
10819 strchr(s1, s2[0]). */
10820 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10824 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10825 to the call.
10827 Return NULL_TREE if no simplification was possible, otherwise return the
10828 simplified form of the call as a tree.
10830 The simplified form may be a constant or other expression which
10831 computes the same value, but in a more efficient manner (including
10832 calls to other builtin functions).
10834 The call may contain arguments which need to be evaluated, but
10835 which are not useful to determine the result of the call. In
10836 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10837 COMPOUND_EXPR will be an argument which must be evaluated.
10838 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10839 COMPOUND_EXPR in the chain will contain the tree for the simplified
10840 form of the builtin function call. */
10842 static tree
10843 fold_builtin_strcat (tree dst, tree src)
10845 if (!validate_arg (dst, POINTER_TYPE)
10846 || !validate_arg (src, POINTER_TYPE))
10847 return NULL_TREE;
10848 else
10850 const char *p = c_getstr (src);
10852 /* If the string length is zero, return the dst parameter. */
10853 if (p && *p == '\0')
10854 return dst;
10856 return NULL_TREE;
10860 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10861 arguments to the call.
10863 Return NULL_TREE if no simplification was possible, otherwise return the
10864 simplified form of the call as a tree.
10866 The simplified form may be a constant or other expression which
10867 computes the same value, but in a more efficient manner (including
10868 calls to other builtin functions).
10870 The call may contain arguments which need to be evaluated, but
10871 which are not useful to determine the result of the call. In
10872 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10873 COMPOUND_EXPR will be an argument which must be evaluated.
10874 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10875 COMPOUND_EXPR in the chain will contain the tree for the simplified
10876 form of the builtin function call. */
10878 static tree
10879 fold_builtin_strncat (tree dst, tree src, tree len)
10881 if (!validate_arg (dst, POINTER_TYPE)
10882 || !validate_arg (src, POINTER_TYPE)
10883 || !validate_arg (len, INTEGER_TYPE))
10884 return NULL_TREE;
10885 else
10887 const char *p = c_getstr (src);
10889 /* If the requested length is zero, or the src parameter string
10890 length is zero, return the dst parameter. */
10891 if (integer_zerop (len) || (p && *p == '\0'))
10892 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
10894 /* If the requested len is greater than or equal to the string
10895 length, call strcat. */
10896 if (TREE_CODE (len) == INTEGER_CST && p
10897 && compare_tree_int (len, strlen (p)) >= 0)
10899 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
10901 /* If the replacement _DECL isn't initialized, don't do the
10902 transformation. */
10903 if (!fn)
10904 return NULL_TREE;
10906 return build_call_expr (fn, 2, dst, src);
10908 return NULL_TREE;
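/* Examples of the strncat folds above:

       strncat (d, s, 0)     ==>  d    (s and the length are still evaluated)
       strncat (d, "", n)    ==>  d
       strncat (d, "ab", 5)  ==>  strcat (d, "ab")   bound covers the string  */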
10912 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10913 to the call.
10915 Return NULL_TREE if no simplification was possible, otherwise return the
10916 simplified form of the call as a tree.
10918 The simplified form may be a constant or other expression which
10919 computes the same value, but in a more efficient manner (including
10920 calls to other builtin functions).
10922 The call may contain arguments which need to be evaluated, but
10923 which are not useful to determine the result of the call. In
10924 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10925 COMPOUND_EXPR will be an argument which must be evaluated.
10926 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10927 COMPOUND_EXPR in the chain will contain the tree for the simplified
10928 form of the builtin function call. */
10930 static tree
10931 fold_builtin_strspn (tree s1, tree s2)
10933 if (!validate_arg (s1, POINTER_TYPE)
10934 || !validate_arg (s2, POINTER_TYPE))
10935 return NULL_TREE;
10936 else
10938 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10940 /* If both arguments are constants, evaluate at compile-time. */
10941 if (p1 && p2)
10943 const size_t r = strspn (p1, p2);
10944 return size_int (r);
10947 /* If either argument is "", the result is 0. */
10948 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10949 /* Evaluate and ignore both arguments in case either one has
10950 side-effects. */
10951 return omit_two_operands (integer_type_node, integer_zero_node,
10952 s1, s2);
10953 return NULL_TREE;
10957 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10958 to the call.
10960 Return NULL_TREE if no simplification was possible, otherwise return the
10961 simplified form of the call as a tree.
10963 The simplified form may be a constant or other expression which
10964 computes the same value, but in a more efficient manner (including
10965 calls to other builtin functions).
10967 The call may contain arguments which need to be evaluated, but
10968 which are not useful to determine the result of the call. In
10969 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10970 COMPOUND_EXPR will be an argument which must be evaluated.
10971 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10972 COMPOUND_EXPR in the chain will contain the tree for the simplified
10973 form of the builtin function call. */
10975 static tree
10976 fold_builtin_strcspn (tree s1, tree s2)
10978 if (!validate_arg (s1, POINTER_TYPE)
10979 || !validate_arg (s2, POINTER_TYPE))
10980 return NULL_TREE;
10981 else
10983 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10985 /* If both arguments are constants, evaluate at compile-time. */
10986 if (p1 && p2)
10988 const size_t r = strcspn (p1, p2);
10989 return size_int (r);
10992 /* If the first argument is "", the result is 0. */
10993 if (p1 && *p1 == '\0')
10995 /* Evaluate and ignore argument s2 in case it has
10996 side-effects. */
10997 return omit_one_operand (integer_type_node,
10998 integer_zero_node, s2);
11001 /* If the second argument is "", return __builtin_strlen(s1). */
11002 if (p2 && *p2 == '\0')
11004 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11006 /* If the replacement _DECL isn't initialized, don't do the
11007 transformation. */
11008 if (!fn)
11009 return NULL_TREE;
11011 return build_call_expr (fn, 1, s1);
11013 return NULL_TREE;
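/* Examples of the strspn/strcspn folds above:

       strspn ("abcba", "ab")  ==>  2    both arguments constant
       strcspn ("hello", "l")  ==>  2
       strspn (s, "")          ==>  0    (s is still evaluated)
       strcspn (s, "")         ==>  strlen (s)  */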
11017 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11018 to the call. IGNORE is true if the value returned
11019 by the builtin will be ignored. UNLOCKED is true if this is
11020 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11021 the known length of the string. Return NULL_TREE if no simplification
11022 was possible. */
11024 tree
11025 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11027 /* If we're using an unlocked function, assume the other unlocked
11028 functions exist explicitly. */
11029 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11030 : implicit_built_in_decls[BUILT_IN_FPUTC];
11031 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11032 : implicit_built_in_decls[BUILT_IN_FWRITE];
11034 /* If the return value is used, don't do the transformation. */
11035 if (!ignore)
11036 return NULL_TREE;
11038 /* Verify the arguments in the original call. */
11039 if (!validate_arg (arg0, POINTER_TYPE)
11040 || !validate_arg (arg1, POINTER_TYPE))
11041 return NULL_TREE;
11043 if (! len)
11044 len = c_strlen (arg0, 0);
11046 /* Get the length of the string passed to fputs. If the length
11047 can't be determined, punt. */
11048 if (!len
11049 || TREE_CODE (len) != INTEGER_CST)
11050 return NULL_TREE;
11052 switch (compare_tree_int (len, 1))
11054 case -1: /* length is 0, delete the call entirely. */
11055 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11057 case 0: /* length is 1, call fputc. */
11059 const char *p = c_getstr (arg0);
11061 if (p != NULL)
11063 if (fn_fputc)
11064 return build_call_expr (fn_fputc, 2,
11065 build_int_cst (NULL_TREE, p[0]), arg1);
11066 else
11067 return NULL_TREE;
11070 /* FALLTHROUGH */
11071 case 1: /* length is greater than 1, call fwrite. */
11073 /* If optimizing for size, keep fputs. */
11074 if (optimize_size)
11075 return NULL_TREE;
11076 /* New argument list transforming fputs(string, stream) to
11077 fwrite(string, 1, len, stream). */
11078 if (fn_fwrite)
11079 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11080 else
11081 return NULL_TREE;
11083 default:
11084 gcc_unreachable ();
11086 return NULL_TREE;
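/* Examples of the fputs transformation above, applicable only when the
   return value is ignored:

       fputs ("", f)     ==>  the call is deleted (f is still evaluated)
       fputs ("x", f)    ==>  fputc ('x', f)
       fputs ("abc", f)  ==>  fwrite ("abc", 1, 3, f)   unless optimizing
                              for size  */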
11089 /* Fold the next_arg or va_start call EXP.  Return true if an error was
11090 produced, false otherwise.  This is done so that we don't output the error
11091 or warning twice or three times.  */
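/* For illustration (hypothetical user code, not part of GCC), the checks
   below diagnose misuses such as

     void f (int a)      { va_list ap; va_start (ap, a); }   fixed args
     void g (int a, ...) { va_list ap; va_start (ap); }      wrong arg count
     void h (int a, int b, ...)
                         { va_list ap; va_start (ap, a); }   A is not the last
                                                              named parameter  */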
11092 bool
11093 fold_builtin_next_arg (tree exp, bool va_start_p)
11095 tree fntype = TREE_TYPE (current_function_decl);
11096 int nargs = call_expr_nargs (exp);
11097 tree arg;
11099 if (TYPE_ARG_TYPES (fntype) == 0
11100 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11101 == void_type_node))
11103 error ("%<va_start%> used in function with fixed args");
11104 return true;
11107 if (va_start_p)
11109 if (va_start_p && (nargs != 2))
11111 error ("wrong number of arguments to function %<va_start%>");
11112 return true;
11114 arg = CALL_EXPR_ARG (exp, 1);
11116 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
11117 when we checked the arguments and if needed issued a warning. */
11118 else
11120 if (nargs == 0)
11122 /* Evidently an out of date version of <stdarg.h>; can't validate
11123 va_start's second argument, but can still work as intended. */
11124 warning (0, "%<__builtin_next_arg%> called without an argument");
11125 return true;
11127 else if (nargs > 1)
11129 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11130 return true;
11132 arg = CALL_EXPR_ARG (exp, 0);
11135 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11136 or __builtin_next_arg (0) the first time we see it, after checking
11137 the arguments and if needed issuing a warning. */
11138 if (!integer_zerop (arg))
11140 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11142 /* Strip off all nops for the sake of the comparison. This
11143 is not quite the same as STRIP_NOPS. It does more.
11144 We must also strip off INDIRECT_EXPR for C++ reference
11145 parameters. */
11146 while (TREE_CODE (arg) == NOP_EXPR
11147 || TREE_CODE (arg) == CONVERT_EXPR
11148 || TREE_CODE (arg) == NON_LVALUE_EXPR
11149 || TREE_CODE (arg) == INDIRECT_REF)
11150 arg = TREE_OPERAND (arg, 0);
11151 if (arg != last_parm)
11153 /* FIXME: Sometimes with the tree optimizers we can get something
11154 that is not the last argument even though the user used the last
11155 argument.  We just warn here and still replace the argument
11156 below, so wrong code may be generated because of
11157 it.  */
11158 warning (0, "second parameter of %<va_start%> not last named argument");
11160 /* We want to verify the second parameter just once before the tree
11161 optimizers are run and then avoid keeping it in the tree,
11162 as otherwise we could warn even for correct code like:
11163 void foo (int i, ...)
11164 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11165 if (va_start_p)
11166 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11167 else
11168 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11170 return false;
11174 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11175 ORIG may be null if this is a 2-argument call. We don't attempt to
11176 simplify calls with more than 3 arguments.
11178 Return NULL_TREE if no simplification was possible, otherwise return the
11179 simplified form of the call as a tree. If IGNORED is true, it means that
11180 the caller does not use the returned value of the function. */
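/* Sketch of the two shapes handled below (illustrative; the strcpy decl must
   be available as an implicit builtin and, when the result is used, the
   length must be a compile-time constant):

     sprintf (buf, "hello")     -> strcpy (buf, "hello"), 5
     sprintf (buf, "%s", str)   -> strcpy (buf, str), strlen (str)
     sprintf (buf, "abc", p++)  -> not folded (extra argument)  */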
11182 static tree
11183 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11185 tree call, retval;
11186 const char *fmt_str = NULL;
11188 /* Verify the required arguments in the original call. We deal with two
11189 types of sprintf() calls: 'sprintf (str, fmt)' and
11190 'sprintf (dest, "%s", orig)'. */
11191 if (!validate_arg (dest, POINTER_TYPE)
11192 || !validate_arg (fmt, POINTER_TYPE))
11193 return NULL_TREE;
11194 if (orig && !validate_arg (orig, POINTER_TYPE))
11195 return NULL_TREE;
11197 /* Check whether the format is a literal string constant. */
11198 fmt_str = c_getstr (fmt);
11199 if (fmt_str == NULL)
11200 return NULL_TREE;
11202 call = NULL_TREE;
11203 retval = NULL_TREE;
11205 if (!init_target_chars ())
11206 return NULL_TREE;
11208 /* If the format doesn't contain % args or %%, use strcpy. */
11209 if (strchr (fmt_str, target_percent) == NULL)
11211 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11213 if (!fn)
11214 return NULL_TREE;
11216 /* Don't optimize sprintf (buf, "abc", ptr++). */
11217 if (orig)
11218 return NULL_TREE;
11220 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11221 'format' is known to contain no % formats. */
11222 call = build_call_expr (fn, 2, dest, fmt);
11223 if (!ignored)
11224 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11227 /* If the format is "%s", use strcpy; if the result is used, we also need the length of ORIG to be a known constant.  */
11228 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11230 tree fn;
11231 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11233 if (!fn)
11234 return NULL_TREE;
11236 /* Don't crash on sprintf (str1, "%s"). */
11237 if (!orig)
11238 return NULL_TREE;
11240 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11241 if (!ignored)
11243 retval = c_strlen (orig, 1);
11244 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11245 return NULL_TREE;
11247 call = build_call_expr (fn, 2, dest, orig);
11250 if (call && retval)
11252 retval = fold_convert
11253 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11254 retval);
11255 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11257 else
11258 return call;
11261 /* Expand a call EXP to __builtin_object_size. */
11263 static rtx
11264 expand_builtin_object_size (tree exp)
11266 tree ost;
11267 int object_size_type;
11268 tree fndecl = get_callee_fndecl (exp);
11269 location_t locus = EXPR_LOCATION (exp);
11271 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11273 error ("%Hfirst argument of %D must be a pointer, second integer constant",
11274 &locus, fndecl);
11275 expand_builtin_trap ();
11276 return const0_rtx;
11279 ost = CALL_EXPR_ARG (exp, 1);
11280 STRIP_NOPS (ost);
11282 if (TREE_CODE (ost) != INTEGER_CST
11283 || tree_int_cst_sgn (ost) < 0
11284 || compare_tree_int (ost, 3) > 0)
11286 error ("%Hlast argument of %D is not integer constant between 0 and 3",
11287 &locus, fndecl);
11288 expand_builtin_trap ();
11289 return const0_rtx;
11292 object_size_type = tree_low_cst (ost, 0);
11294 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11297 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11298 FCODE is the BUILT_IN_* to use.
11299 Return NULL_RTX if we failed; the caller should emit a normal call,
11300 otherwise try to get the result in TARGET, if convenient (and in
11301 mode MODE if that's convenient). */
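/* Illustrative examples of the expansion strategy below (a sketch; the plain
   mem* decls are assumed to exist because the _chk forms were used):

     __memcpy_chk (d, s, n, bos)   -> memcpy (d, s, n)    when N is a constant
                                                          <= BOS, or BOS is
                                                          (size_t) -1
     __memcpy_chk (d, s, 10, 5)    -> warning, kept as a library call
     __memmove_chk (d, s, n, bos)  -> __memcpy_chk (...)  when S refers to
                                                          read-only data  */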
11303 static rtx
11304 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11305 enum built_in_function fcode)
11307 tree dest, src, len, size;
11309 if (!validate_arglist (exp,
11310 POINTER_TYPE,
11311 fcode == BUILT_IN_MEMSET_CHK
11312 ? INTEGER_TYPE : POINTER_TYPE,
11313 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11314 return NULL_RTX;
11316 dest = CALL_EXPR_ARG (exp, 0);
11317 src = CALL_EXPR_ARG (exp, 1);
11318 len = CALL_EXPR_ARG (exp, 2);
11319 size = CALL_EXPR_ARG (exp, 3);
11321 if (! host_integerp (size, 1))
11322 return NULL_RTX;
11324 if (host_integerp (len, 1) || integer_all_onesp (size))
11326 tree fn;
11328 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11330 location_t locus = EXPR_LOCATION (exp);
11331 warning (0, "%Hcall to %D will always overflow destination buffer",
11332 &locus, get_callee_fndecl (exp));
11333 return NULL_RTX;
11336 fn = NULL_TREE;
11337 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11338 mem{cpy,pcpy,move,set} is available. */
11339 switch (fcode)
11341 case BUILT_IN_MEMCPY_CHK:
11342 fn = built_in_decls[BUILT_IN_MEMCPY];
11343 break;
11344 case BUILT_IN_MEMPCPY_CHK:
11345 fn = built_in_decls[BUILT_IN_MEMPCPY];
11346 break;
11347 case BUILT_IN_MEMMOVE_CHK:
11348 fn = built_in_decls[BUILT_IN_MEMMOVE];
11349 break;
11350 case BUILT_IN_MEMSET_CHK:
11351 fn = built_in_decls[BUILT_IN_MEMSET];
11352 break;
11353 default:
11354 break;
11357 if (! fn)
11358 return NULL_RTX;
11360 fn = build_call_expr (fn, 3, dest, src, len);
11361 if (TREE_CODE (fn) == CALL_EXPR)
11362 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11363 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11365 else if (fcode == BUILT_IN_MEMSET_CHK)
11366 return NULL_RTX;
11367 else
11369 unsigned int dest_align
11370 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11372 /* If DEST is not a pointer type, call the normal function. */
11373 if (dest_align == 0)
11374 return NULL_RTX;
11376 /* If SRC and DEST are the same (and not volatile), do nothing. */
11377 if (operand_equal_p (src, dest, 0))
11379 tree expr;
11381 if (fcode != BUILT_IN_MEMPCPY_CHK)
11383 /* Evaluate and ignore LEN in case it has side-effects. */
11384 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11385 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11388 len = fold_convert (TREE_TYPE (dest), len);
11389 expr = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
11390 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11393 /* __memmove_chk special case. */
11394 if (fcode == BUILT_IN_MEMMOVE_CHK)
11396 unsigned int src_align
11397 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11399 if (src_align == 0)
11400 return NULL_RTX;
11402 /* If src is categorized for a readonly section we can use
11403 normal __memcpy_chk. */
11404 if (readonly_data_expr (src))
11406 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11407 if (!fn)
11408 return NULL_RTX;
11409 fn = build_call_expr (fn, 4, dest, src, len, size);
11410 if (TREE_CODE (fn) == CALL_EXPR)
11411 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11412 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11415 return NULL_RTX;
11419 /* Emit warning if a buffer overflow is detected at compile time. */
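/* Hypothetical examples of calls this routine warns about (illustrative only;
   DST is assumed here to have a known object size of 4 bytes):

     __strcpy_chk (dst, "toolong", 4)   -> "will always overflow ..."
     __strncat_chk (dst, src, 16, 4)    -> "might overflow ..." when the
                                           length of SRC is unknown  */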
11421 static void
11422 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11424 int is_strlen = 0;
11425 tree len, size;
11426 location_t locus;
11428 switch (fcode)
11430 case BUILT_IN_STRCPY_CHK:
11431 case BUILT_IN_STPCPY_CHK:
11432 /* For __strcat_chk the warning will be emitted only if overflowing
11433 by at least strlen (dest) + 1 bytes. */
11434 case BUILT_IN_STRCAT_CHK:
11435 len = CALL_EXPR_ARG (exp, 1);
11436 size = CALL_EXPR_ARG (exp, 2);
11437 is_strlen = 1;
11438 break;
11439 case BUILT_IN_STRNCAT_CHK:
11440 case BUILT_IN_STRNCPY_CHK:
11441 len = CALL_EXPR_ARG (exp, 2);
11442 size = CALL_EXPR_ARG (exp, 3);
11443 break;
11444 case BUILT_IN_SNPRINTF_CHK:
11445 case BUILT_IN_VSNPRINTF_CHK:
11446 len = CALL_EXPR_ARG (exp, 1);
11447 size = CALL_EXPR_ARG (exp, 3);
11448 break;
11449 default:
11450 gcc_unreachable ();
11453 if (!len || !size)
11454 return;
11456 if (! host_integerp (size, 1) || integer_all_onesp (size))
11457 return;
11459 if (is_strlen)
11461 len = c_strlen (len, 1);
11462 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11463 return;
11465 else if (fcode == BUILT_IN_STRNCAT_CHK)
11467 tree src = CALL_EXPR_ARG (exp, 1);
11468 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11469 return;
11470 src = c_strlen (src, 1);
11471 if (! src || ! host_integerp (src, 1))
11473 locus = EXPR_LOCATION (exp);
11474 warning (0, "%Hcall to %D might overflow destination buffer",
11475 &locus, get_callee_fndecl (exp));
11476 return;
11478 else if (tree_int_cst_lt (src, size))
11479 return;
11481 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11482 return;
11484 locus = EXPR_LOCATION (exp);
11485 warning (0, "%Hcall to %D will always overflow destination buffer",
11486 &locus, get_callee_fndecl (exp));
11489 /* Emit warning if a buffer overflow is detected at compile time
11490 in __sprintf_chk/__vsprintf_chk calls. */
11492 static void
11493 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11495 tree dest, size, len, fmt, flag;
11496 const char *fmt_str;
11497 int nargs = call_expr_nargs (exp);
11499 /* Verify the required arguments in the original call. */
11501 if (nargs < 4)
11502 return;
11503 dest = CALL_EXPR_ARG (exp, 0);
11504 flag = CALL_EXPR_ARG (exp, 1);
11505 size = CALL_EXPR_ARG (exp, 2);
11506 fmt = CALL_EXPR_ARG (exp, 3);
11508 if (! host_integerp (size, 1) || integer_all_onesp (size))
11509 return;
11511 /* Check whether the format is a literal string constant. */
11512 fmt_str = c_getstr (fmt);
11513 if (fmt_str == NULL)
11514 return;
11516 if (!init_target_chars ())
11517 return;
11519 /* If the format doesn't contain % args or %%, we know its size. */
11520 if (strchr (fmt_str, target_percent) == 0)
11521 len = build_int_cstu (size_type_node, strlen (fmt_str));
11522 /* If the format is "%s" and the first ... argument is a string literal,
11523 we know it too. */
11524 else if (fcode == BUILT_IN_SPRINTF_CHK
11525 && strcmp (fmt_str, target_percent_s) == 0)
11527 tree arg;
11529 if (nargs < 5)
11530 return;
11531 arg = CALL_EXPR_ARG (exp, 4);
11532 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11533 return;
11535 len = c_strlen (arg, 1);
11536 if (!len || ! host_integerp (len, 1))
11537 return;
11539 else
11540 return;
11542 if (! tree_int_cst_lt (len, size))
11544 location_t locus = EXPR_LOCATION (exp);
11545 warning (0, "%Hcall to %D will always overflow destination buffer",
11546 &locus, get_callee_fndecl (exp));
11550 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11551 if possible. */
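/* Illustrative folding examples (a sketch; assumes a local declared as
   "char buf[64]" and that the object-size machinery can see it):

     __builtin_object_size (buf, 0)       -> 64
     __builtin_object_size (buf + 16, 1)  -> 48
     __builtin_object_size (p, 0)         -> (size_t) -1   if nothing is known
     __builtin_object_size (p, 2)         -> 0             if nothing is known  */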
11553 tree
11554 fold_builtin_object_size (tree ptr, tree ost)
11556 tree ret = NULL_TREE;
11557 int object_size_type;
11559 if (!validate_arg (ptr, POINTER_TYPE)
11560 || !validate_arg (ost, INTEGER_TYPE))
11561 return NULL_TREE;
11563 STRIP_NOPS (ost);
11565 if (TREE_CODE (ost) != INTEGER_CST
11566 || tree_int_cst_sgn (ost) < 0
11567 || compare_tree_int (ost, 3) > 0)
11568 return NULL_TREE;
11570 object_size_type = tree_low_cst (ost, 0);
11572 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11573 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11574 and (size_t) 0 for types 2 and 3. */
11575 if (TREE_SIDE_EFFECTS (ptr))
11576 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11578 if (TREE_CODE (ptr) == ADDR_EXPR)
11579 ret = build_int_cstu (size_type_node,
11580 compute_builtin_object_size (ptr, object_size_type));
11582 else if (TREE_CODE (ptr) == SSA_NAME)
11584 unsigned HOST_WIDE_INT bytes;
11586 /* If object size is not known yet, delay folding until
11587 later.  Maybe subsequent passes will help determine
11588 it. */
11589 bytes = compute_builtin_object_size (ptr, object_size_type);
11590 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11591 ? -1 : 0))
11592 ret = build_int_cstu (size_type_node, bytes);
11595 if (ret)
11597 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11598 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11599 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11600 ret = NULL_TREE;
11603 return ret;
11606 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11607 DEST, SRC, LEN, and SIZE are the arguments to the call.
11608 IGNORE is true if the return value can be ignored.  FCODE is the BUILT_IN_*
11609 code of the builtin. If MAXLEN is not NULL, it is maximum length
11610 passed as third argument. */
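/* Sketch of the tree-level foldings below (illustrative; "bos" is shorthand
   for the object-size argument):

     __memcpy_chk (d, d, n, bos)          -> d                  SRC == DEST,
                                                                N still evaluated
     __mempcpy_chk (d, d, n, bos)         -> d + n
     __memcpy_chk (d, s, 8, 32)           -> memcpy (d, s, 8)   8 <= 32
     (void) __mempcpy_chk (d, s, n, bos)  -> (void) __memcpy_chk (d, s, n, bos)
                                             when N is not constant  */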
11612 tree
11613 fold_builtin_memory_chk (tree fndecl,
11614 tree dest, tree src, tree len, tree size,
11615 tree maxlen, bool ignore,
11616 enum built_in_function fcode)
11618 tree fn;
11620 if (!validate_arg (dest, POINTER_TYPE)
11621 || !validate_arg (src,
11622 (fcode == BUILT_IN_MEMSET_CHK
11623 ? INTEGER_TYPE : POINTER_TYPE))
11624 || !validate_arg (len, INTEGER_TYPE)
11625 || !validate_arg (size, INTEGER_TYPE))
11626 return NULL_TREE;
11628 /* If SRC and DEST are the same (and not volatile), return DEST
11629 (resp. DEST+LEN for __mempcpy_chk). */
11630 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11632 if (fcode != BUILT_IN_MEMPCPY_CHK)
11633 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11634 else
11636 tree temp = fold_convert (TREE_TYPE (dest), len);
11637 temp = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, temp);
11638 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11642 if (! host_integerp (size, 1))
11643 return NULL_TREE;
11645 if (! integer_all_onesp (size))
11647 if (! host_integerp (len, 1))
11649 /* If LEN is not constant, try MAXLEN too.
11650 For MAXLEN only allow optimizing into non-_ocs function
11651 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11652 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11654 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11656 /* (void) __mempcpy_chk () can be optimized into
11657 (void) __memcpy_chk (). */
11658 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11659 if (!fn)
11660 return NULL_TREE;
11662 return build_call_expr (fn, 4, dest, src, len, size);
11664 return NULL_TREE;
11667 else
11668 maxlen = len;
11670 if (tree_int_cst_lt (size, maxlen))
11671 return NULL_TREE;
11674 fn = NULL_TREE;
11675 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11676 mem{cpy,pcpy,move,set} is available. */
11677 switch (fcode)
11679 case BUILT_IN_MEMCPY_CHK:
11680 fn = built_in_decls[BUILT_IN_MEMCPY];
11681 break;
11682 case BUILT_IN_MEMPCPY_CHK:
11683 fn = built_in_decls[BUILT_IN_MEMPCPY];
11684 break;
11685 case BUILT_IN_MEMMOVE_CHK:
11686 fn = built_in_decls[BUILT_IN_MEMMOVE];
11687 break;
11688 case BUILT_IN_MEMSET_CHK:
11689 fn = built_in_decls[BUILT_IN_MEMSET];
11690 break;
11691 default:
11692 break;
11695 if (!fn)
11696 return NULL_TREE;
11698 return build_call_expr (fn, 3, dest, src, len);
11701 /* Fold a call to the __st[rp]cpy_chk builtin.
11702 DEST, SRC, and SIZE are the arguments to the call.
11703 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11704 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11705 strings passed as second argument. */
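/* Illustrative sketch of the strcpy/stpcpy checking foldings below (assumes
   the plain and _chk decls involved are available):

     __strcpy_chk (d, d, bos)        -> d                        SRC == DEST
     __strcpy_chk (d, "hi", 16)      -> strcpy (d, "hi")         strlen ("hi") < 16
     __strcpy_chk (d, s, bos)        -> __memcpy_chk (d, s, strlen (s) + 1, bos)
                                        when c_strlen gives a non-constant,
                                        side-effect-free length
     (void) __stpcpy_chk (d, s, bos) -> (void) __strcpy_chk (d, s, bos)  */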
11707 tree
11708 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
11709 tree maxlen, bool ignore,
11710 enum built_in_function fcode)
11712 tree len, fn;
11714 if (!validate_arg (dest, POINTER_TYPE)
11715 || !validate_arg (src, POINTER_TYPE)
11716 || !validate_arg (size, INTEGER_TYPE))
11717 return NULL_TREE;
11719 /* If SRC and DEST are the same (and not volatile), return DEST. */
11720 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
11721 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
11723 if (! host_integerp (size, 1))
11724 return NULL_TREE;
11726 if (! integer_all_onesp (size))
11728 len = c_strlen (src, 1);
11729 if (! len || ! host_integerp (len, 1))
11731 /* If LEN is not constant, try MAXLEN too.
11732 For MAXLEN only allow optimizing into non-_ocs function
11733 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11734 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11736 if (fcode == BUILT_IN_STPCPY_CHK)
11738 if (! ignore)
11739 return NULL_TREE;
11741 /* If return value of __stpcpy_chk is ignored,
11742 optimize into __strcpy_chk. */
11743 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
11744 if (!fn)
11745 return NULL_TREE;
11747 return build_call_expr (fn, 3, dest, src, size);
11750 if (! len || TREE_SIDE_EFFECTS (len))
11751 return NULL_TREE;
11753 /* If c_strlen returned something, but not a constant,
11754 transform __strcpy_chk into __memcpy_chk. */
11755 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11756 if (!fn)
11757 return NULL_TREE;
11759 len = size_binop (PLUS_EXPR, len, ssize_int (1));
11760 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
11761 build_call_expr (fn, 4,
11762 dest, src, len, size));
11765 else
11766 maxlen = len;
11768 if (! tree_int_cst_lt (maxlen, size))
11769 return NULL_TREE;
11772 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
11773 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
11774 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
11775 if (!fn)
11776 return NULL_TREE;
11778 return build_call_expr (fn, 2, dest, src);
11781 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11782 are the arguments to the call. If MAXLEN is not NULL, it is maximum
11783 length passed as third argument. */
11785 tree
11786 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
11787 tree maxlen)
11789 tree fn;
11791 if (!validate_arg (dest, POINTER_TYPE)
11792 || !validate_arg (src, POINTER_TYPE)
11793 || !validate_arg (len, INTEGER_TYPE)
11794 || !validate_arg (size, INTEGER_TYPE))
11795 return NULL_TREE;
11797 if (! host_integerp (size, 1))
11798 return NULL_TREE;
11800 if (! integer_all_onesp (size))
11802 if (! host_integerp (len, 1))
11804 /* If LEN is not constant, try MAXLEN too.
11805 For MAXLEN only allow optimizing into non-_ocs function
11806 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11807 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11808 return NULL_TREE;
11810 else
11811 maxlen = len;
11813 if (tree_int_cst_lt (size, maxlen))
11814 return NULL_TREE;
11817 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11818 fn = built_in_decls[BUILT_IN_STRNCPY];
11819 if (!fn)
11820 return NULL_TREE;
11822 return build_call_expr (fn, 3, dest, src, len);
11825 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11826 are the arguments to the call. */
11828 static tree
11829 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
11831 tree fn;
11832 const char *p;
11834 if (!validate_arg (dest, POINTER_TYPE)
11835 || !validate_arg (src, POINTER_TYPE)
11836 || !validate_arg (size, INTEGER_TYPE))
11837 return NULL_TREE;
11839 p = c_getstr (src);
11840 /* If the SRC parameter is "", return DEST. */
11841 if (p && *p == '\0')
11842 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11844 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
11845 return NULL_TREE;
11847 /* If __builtin_strcat_chk is used, assume strcat is available. */
11848 fn = built_in_decls[BUILT_IN_STRCAT];
11849 if (!fn)
11850 return NULL_TREE;
11852 return build_call_expr (fn, 2, dest, src);
11855 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11856 LEN, and SIZE. */
11858 static tree
11859 fold_builtin_strncat_chk (tree fndecl,
11860 tree dest, tree src, tree len, tree size)
11862 tree fn;
11863 const char *p;
11865 if (!validate_arg (dest, POINTER_TYPE)
11866 || !validate_arg (src, POINTER_TYPE)
11867 || !validate_arg (len, INTEGER_TYPE)
11868 || !validate_arg (size, INTEGER_TYPE))
11869 return NULL_TREE;
11871 p = c_getstr (src);
11872 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11873 if (p && *p == '\0')
11874 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11875 else if (integer_zerop (len))
11876 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11878 if (! host_integerp (size, 1))
11879 return NULL_TREE;
11881 if (! integer_all_onesp (size))
11883 tree src_len = c_strlen (src, 1);
11884 if (src_len
11885 && host_integerp (src_len, 1)
11886 && host_integerp (len, 1)
11887 && ! tree_int_cst_lt (len, src_len))
11889 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
11890 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
11891 if (!fn)
11892 return NULL_TREE;
11894 return build_call_expr (fn, 3, dest, src, size);
11896 return NULL_TREE;
11899 /* If __builtin_strncat_chk is used, assume strncat is available. */
11900 fn = built_in_decls[BUILT_IN_STRNCAT];
11901 if (!fn)
11902 return NULL_TREE;
11904 return build_call_expr (fn, 3, dest, src, len);
11907 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
11908 a normal call should be emitted rather than expanding the function
11909 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
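/* Illustrative examples of the folding below (a sketch; FLAG is the second
   argument of the _chk call and BOS the object-size argument):

     __sprintf_chk (d, 0, 32, "hello")     -> sprintf (d, "hello")     5 < 32
     __sprintf_chk (d, 0, 32, "%s", "hi")  -> sprintf (d, "%s", "hi")  2 < 32
     __sprintf_chk (d, 0, 4, "toolong")    -> not folded (would overflow)
     __sprintf_chk (d, 1, bos, "%d", i)    -> not folded (FLAG != 0 and the
                                              format contains %)  */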
11911 static tree
11912 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
11914 tree dest, size, len, fn, fmt, flag;
11915 const char *fmt_str;
11916 int nargs = call_expr_nargs (exp);
11918 /* Verify the required arguments in the original call. */
11919 if (nargs < 4)
11920 return NULL_TREE;
11921 dest = CALL_EXPR_ARG (exp, 0);
11922 if (!validate_arg (dest, POINTER_TYPE))
11923 return NULL_TREE;
11924 flag = CALL_EXPR_ARG (exp, 1);
11925 if (!validate_arg (flag, INTEGER_TYPE))
11926 return NULL_TREE;
11927 size = CALL_EXPR_ARG (exp, 2);
11928 if (!validate_arg (size, INTEGER_TYPE))
11929 return NULL_TREE;
11930 fmt = CALL_EXPR_ARG (exp, 3);
11931 if (!validate_arg (fmt, POINTER_TYPE))
11932 return NULL_TREE;
11934 if (! host_integerp (size, 1))
11935 return NULL_TREE;
11937 len = NULL_TREE;
11939 if (!init_target_chars ())
11940 return NULL_TREE;
11942 /* Check whether the format is a literal string constant. */
11943 fmt_str = c_getstr (fmt);
11944 if (fmt_str != NULL)
11946 /* If the format doesn't contain % args or %%, we know the size. */
11947 if (strchr (fmt_str, target_percent) == 0)
11949 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
11950 len = build_int_cstu (size_type_node, strlen (fmt_str));
11952 /* If the format is "%s" and the first ... argument is a string literal,
11953 we know the size too. */
11954 else if (fcode == BUILT_IN_SPRINTF_CHK
11955 && strcmp (fmt_str, target_percent_s) == 0)
11957 tree arg;
11959 if (nargs == 5)
11961 arg = CALL_EXPR_ARG (exp, 4);
11962 if (validate_arg (arg, POINTER_TYPE))
11964 len = c_strlen (arg, 1);
11965 if (! len || ! host_integerp (len, 1))
11966 len = NULL_TREE;
11972 if (! integer_all_onesp (size))
11974 if (! len || ! tree_int_cst_lt (len, size))
11975 return NULL_TREE;
11978 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
11979 or if format doesn't contain % chars or is "%s". */
11980 if (! integer_zerop (flag))
11982 if (fmt_str == NULL)
11983 return NULL_TREE;
11984 if (strchr (fmt_str, target_percent) != NULL
11985 && strcmp (fmt_str, target_percent_s))
11986 return NULL_TREE;
11989 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
11990 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
11991 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
11992 if (!fn)
11993 return NULL_TREE;
11995 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
11998 /* Fold a call EXP to __{,v}snprintf_chk.  Return NULL_TREE if
11999 a normal call should be emitted rather than expanding the function
12000 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12001 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12002 passed as second argument. */
12004 tree
12005 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12006 enum built_in_function fcode)
12008 tree dest, size, len, fn, fmt, flag;
12009 const char *fmt_str;
12011 /* Verify the required arguments in the original call. */
12012 if (call_expr_nargs (exp) < 5)
12013 return NULL_TREE;
12014 dest = CALL_EXPR_ARG (exp, 0);
12015 if (!validate_arg (dest, POINTER_TYPE))
12016 return NULL_TREE;
12017 len = CALL_EXPR_ARG (exp, 1);
12018 if (!validate_arg (len, INTEGER_TYPE))
12019 return NULL_TREE;
12020 flag = CALL_EXPR_ARG (exp, 2);
12021 if (!validate_arg (flag, INTEGER_TYPE))
12022 return NULL_TREE;
12023 size = CALL_EXPR_ARG (exp, 3);
12024 if (!validate_arg (size, INTEGER_TYPE))
12025 return NULL_TREE;
12026 fmt = CALL_EXPR_ARG (exp, 4);
12027 if (!validate_arg (fmt, POINTER_TYPE))
12028 return NULL_TREE;
12030 if (! host_integerp (size, 1))
12031 return NULL_TREE;
12033 if (! integer_all_onesp (size))
12035 if (! host_integerp (len, 1))
12037 /* If LEN is not constant, try MAXLEN too.
12038 For MAXLEN only allow optimizing into non-_ocs function
12039 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12040 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12041 return NULL_TREE;
12043 else
12044 maxlen = len;
12046 if (tree_int_cst_lt (size, maxlen))
12047 return NULL_TREE;
12050 if (!init_target_chars ())
12051 return NULL_TREE;
12053 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12054 or if format doesn't contain % chars or is "%s". */
12055 if (! integer_zerop (flag))
12057 fmt_str = c_getstr (fmt);
12058 if (fmt_str == NULL)
12059 return NULL_TREE;
12060 if (strchr (fmt_str, target_percent) != NULL
12061 && strcmp (fmt_str, target_percent_s))
12062 return NULL_TREE;
12065 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12066 available. */
12067 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12068 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12069 if (!fn)
12070 return NULL_TREE;
12072 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12075 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12076 FMT and ARG are the arguments to the call; we don't fold cases with
12077 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12079 Return NULL_TREE if no simplification was possible, otherwise return the
12080 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12081 code of the function to be simplified. */
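/* A sketch of the printf foldings below (illustrative; they fire only when
   the return value is unused and the putchar/puts decls are available):

     printf ("")           -> 0 (call removed)
     printf ("x")          -> putchar ('x')
     printf ("hello\n")    -> puts ("hello")
     printf ("%s\n", str)  -> puts (str)
     printf ("%c", c)      -> putchar (c)  */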
12083 static tree
12084 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12085 enum built_in_function fcode)
12087 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12088 const char *fmt_str = NULL;
12090 /* If the return value is used, don't do the transformation. */
12091 if (! ignore)
12092 return NULL_TREE;
12094 /* Verify the required arguments in the original call. */
12095 if (!validate_arg (fmt, POINTER_TYPE))
12096 return NULL_TREE;
12098 /* Check whether the format is a literal string constant. */
12099 fmt_str = c_getstr (fmt);
12100 if (fmt_str == NULL)
12101 return NULL_TREE;
12103 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12105 /* If we're using an unlocked function, assume the other
12106 unlocked functions exist explicitly. */
12107 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12108 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12110 else
12112 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12113 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12116 if (!init_target_chars ())
12117 return NULL_TREE;
12119 if (strcmp (fmt_str, target_percent_s) == 0
12120 || strchr (fmt_str, target_percent) == NULL)
12122 const char *str;
12124 if (strcmp (fmt_str, target_percent_s) == 0)
12126 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12127 return NULL_TREE;
12129 if (!arg || !validate_arg (arg, POINTER_TYPE))
12130 return NULL_TREE;
12132 str = c_getstr (arg);
12133 if (str == NULL)
12134 return NULL_TREE;
12136 else
12138 /* The format specifier doesn't contain any '%' characters. */
12139 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12140 && arg)
12141 return NULL_TREE;
12142 str = fmt_str;
12145 /* If the string was "", printf does nothing. */
12146 if (str[0] == '\0')
12147 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12149 /* If the string has length of 1, call putchar. */
12150 if (str[1] == '\0')
12152 /* Given printf ("c") (where C is any one character),
12153 convert "c"[0] to an int and pass that to the replacement
12154 function. */
12155 newarg = build_int_cst (NULL_TREE, str[0]);
12156 if (fn_putchar)
12157 call = build_call_expr (fn_putchar, 1, newarg);
12159 else
12161 /* If the string was "string\n", call puts("string"). */
12162 size_t len = strlen (str);
12163 if ((unsigned char)str[len - 1] == target_newline)
12165 /* Create a NUL-terminated string that's one char shorter
12166 than the original, stripping off the trailing '\n'. */
12167 char *newstr = alloca (len);
12168 memcpy (newstr, str, len - 1);
12169 newstr[len - 1] = 0;
12171 newarg = build_string_literal (len, newstr);
12172 if (fn_puts)
12173 call = build_call_expr (fn_puts, 1, newarg);
12175 else
12176 /* We'd like to arrange to call fputs(string,stdout) here,
12177 but we need stdout and don't have a way to get it yet. */
12178 return NULL_TREE;
12182 /* The other optimizations can be done only on the non-va_list variants. */
12183 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12184 return NULL_TREE;
12186 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12187 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12189 if (!arg || !validate_arg (arg, POINTER_TYPE))
12190 return NULL_TREE;
12191 if (fn_puts)
12192 call = build_call_expr (fn_puts, 1, arg);
12195 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12196 else if (strcmp (fmt_str, target_percent_c) == 0)
12198 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12199 return NULL_TREE;
12200 if (fn_putchar)
12201 call = build_call_expr (fn_putchar, 1, arg);
12204 if (!call)
12205 return NULL_TREE;
12207 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12210 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12211 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12212 more than 3 arguments, and ARG may be null in the 2-argument case.
12214 Return NULL_TREE if no simplification was possible, otherwise return the
12215 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12216 code of the function to be simplified. */
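/* A sketch of the fprintf foldings below (illustrative; they fire only when
   the return value is unused and the fputc/fputs decls are available):

     fprintf (fp, "")         -> 0 (call removed, unless FP has side effects)
     fprintf (fp, "hello")    -> fputs ("hello", fp)
     fprintf (fp, "%s", str)  -> fputs (str, fp)
     fprintf (fp, "%c", c)    -> fputc (c, fp)  */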
12218 static tree
12219 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12220 enum built_in_function fcode)
12222 tree fn_fputc, fn_fputs, call = NULL_TREE;
12223 const char *fmt_str = NULL;
12225 /* If the return value is used, don't do the transformation. */
12226 if (! ignore)
12227 return NULL_TREE;
12229 /* Verify the required arguments in the original call. */
12230 if (!validate_arg (fp, POINTER_TYPE))
12231 return NULL_TREE;
12232 if (!validate_arg (fmt, POINTER_TYPE))
12233 return NULL_TREE;
12235 /* Check whether the format is a literal string constant. */
12236 fmt_str = c_getstr (fmt);
12237 if (fmt_str == NULL)
12238 return NULL_TREE;
12240 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12242 /* If we're using an unlocked function, assume the other
12243 unlocked functions exist explicitly. */
12244 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12245 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12247 else
12249 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12250 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12253 if (!init_target_chars ())
12254 return NULL_TREE;
12256 /* If the format doesn't contain % args or %%, use fputs.  */
12257 if (strchr (fmt_str, target_percent) == NULL)
12259 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12260 && arg)
12261 return NULL_TREE;
12263 /* If the format specifier was "", fprintf does nothing. */
12264 if (fmt_str[0] == '\0')
12266 /* If FP has side-effects, just wait until gimplification is
12267 done. */
12268 if (TREE_SIDE_EFFECTS (fp))
12269 return NULL_TREE;
12271 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12274 /* When "string" doesn't contain %, replace all cases of
12275 fprintf (fp, string) with fputs (string, fp). The fputs
12276 builtin will take care of special cases like length == 1. */
12277 if (fn_fputs)
12278 call = build_call_expr (fn_fputs, 2, fmt, fp);
12281 /* The other optimizations can be done only on the non-va_list variants. */
12282 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12283 return NULL_TREE;
12285 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12286 else if (strcmp (fmt_str, target_percent_s) == 0)
12288 if (!arg || !validate_arg (arg, POINTER_TYPE))
12289 return NULL_TREE;
12290 if (fn_fputs)
12291 call = build_call_expr (fn_fputs, 2, arg, fp);
12294 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12295 else if (strcmp (fmt_str, target_percent_c) == 0)
12297 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12298 return NULL_TREE;
12299 if (fn_fputc)
12300 call = build_call_expr (fn_fputc, 2, arg, fp);
12303 if (!call)
12304 return NULL_TREE;
12305 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12308 /* Initialize format string characters in the target charset. */
12310 static bool
12311 init_target_chars (void)
12313 static bool init;
12314 if (!init)
12316 target_newline = lang_hooks.to_target_charset ('\n');
12317 target_percent = lang_hooks.to_target_charset ('%');
12318 target_c = lang_hooks.to_target_charset ('c');
12319 target_s = lang_hooks.to_target_charset ('s');
12320 if (target_newline == 0 || target_percent == 0 || target_c == 0
12321 || target_s == 0)
12322 return false;
12324 target_percent_c[0] = target_percent;
12325 target_percent_c[1] = target_c;
12326 target_percent_c[2] = '\0';
12328 target_percent_s[0] = target_percent;
12329 target_percent_s[1] = target_s;
12330 target_percent_s[2] = '\0';
12332 target_percent_s_newline[0] = target_percent;
12333 target_percent_s_newline[1] = target_s;
12334 target_percent_s_newline[2] = target_newline;
12335 target_percent_s_newline[3] = '\0';
12337 init = true;
12339 return true;
12342 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12343 and no overflow/underflow occurred. INEXACT is true if M was not
12344 exactly calculated. TYPE is the tree type for the result. This
12345 function assumes that you cleared the MPFR flags and then
12346 calculated M to see if anything subsequently set a flag prior to
12347 entering this function. Return NULL_TREE if any checks fail. */
12349 static tree
12350 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12352 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12353 overflow/underflow occurred. If -frounding-math, proceed iff the
12354 result of calling FUNC was exact. */
12355 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12356 && (!flag_rounding_math || !inexact))
12358 REAL_VALUE_TYPE rr;
12360 real_from_mpfr (&rr, m, type, GMP_RNDN);
12361 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12362 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12363 but the mpfr_t is not, then we underflowed in the
12364 conversion. */
12365 if (!real_isnan (&rr) && !real_isinf (&rr)
12366 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12368 REAL_VALUE_TYPE rmode;
12370 real_convert (&rmode, TYPE_MODE (type), &rr);
12371 /* Proceed iff the specified mode can hold the value. */
12372 if (real_identical (&rmode, &rr))
12373 return build_real (type, rmode);
12376 return NULL_TREE;
12379 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12380 FUNC on it and return the resulting value as a tree with type TYPE.
12381 If MIN and/or MAX are not NULL, then the supplied ARG must be
12382 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12383 acceptable values, otherwise they are not. The mpfr precision is
12384 set to the precision of TYPE. We assume that function FUNC returns
12385 zero if the result could be calculated exactly within the requested
12386 precision. */
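/* For illustration (a hypothetical use, matching how the math folders in
   this file are expected to call this helper): folding a constant call such
   as

     double d = __builtin_sin (1.0);

   reaches this routine with FUNC == mpfr_sin, and the call is replaced by a
   REAL_CST computed in the precision of TYPE, provided MPFR reports no
   NaN/Inf, no overflow/underflow, and an exact result when -frounding-math
   is in effect.  */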
12388 static tree
12389 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12390 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12391 bool inclusive)
12393 tree result = NULL_TREE;
12395 STRIP_NOPS (arg);
12397 /* To proceed, MPFR must exactly represent the target floating point
12398 format, which only happens when the target base equals two. */
12399 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12400 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12402 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12404 if (!real_isnan (ra) && !real_isinf (ra)
12405 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12406 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12408 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12409 int inexact;
12410 mpfr_t m;
12412 mpfr_init2 (m, prec);
12413 mpfr_from_real (m, ra, GMP_RNDN);
12414 mpfr_clear_flags ();
12415 inexact = func (m, m, GMP_RNDN);
12416 result = do_mpfr_ckconv (m, type, inexact);
12417 mpfr_clear (m);
12421 return result;
12424 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12425 FUNC on it and return the resulting value as a tree with type TYPE.
12426 The mpfr precision is set to the precision of TYPE. We assume that
12427 function FUNC returns zero if the result could be calculated
12428 exactly within the requested precision. */
12430 static tree
12431 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12432 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12434 tree result = NULL_TREE;
12436 STRIP_NOPS (arg1);
12437 STRIP_NOPS (arg2);
12439 /* To proceed, MPFR must exactly represent the target floating point
12440 format, which only happens when the target base equals two. */
12441 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12442 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12443 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12445 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12446 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12448 if (!real_isnan (ra1) && !real_isinf (ra1)
12449 && !real_isnan (ra2) && !real_isinf (ra2))
12451 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12452 int inexact;
12453 mpfr_t m1, m2;
12455 mpfr_inits2 (prec, m1, m2, NULL);
12456 mpfr_from_real (m1, ra1, GMP_RNDN);
12457 mpfr_from_real (m2, ra2, GMP_RNDN);
12458 mpfr_clear_flags ();
12459 inexact = func (m1, m1, m2, GMP_RNDN);
12460 result = do_mpfr_ckconv (m1, type, inexact);
12461 mpfr_clears (m1, m2, NULL);
12465 return result;
12468 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12469 FUNC on it and return the resulting value as a tree with type TYPE.
12470 The mpfr precision is set to the precision of TYPE. We assume that
12471 function FUNC returns zero if the result could be calculated
12472 exactly within the requested precision. */
12474 static tree
12475 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12476 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12478 tree result = NULL_TREE;
12480 STRIP_NOPS (arg1);
12481 STRIP_NOPS (arg2);
12482 STRIP_NOPS (arg3);
12484 /* To proceed, MPFR must exactly represent the target floating point
12485 format, which only happens when the target base equals two. */
12486 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12487 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12488 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12489 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12491 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12492 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12493 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12495 if (!real_isnan (ra1) && !real_isinf (ra1)
12496 && !real_isnan (ra2) && !real_isinf (ra2)
12497 && !real_isnan (ra3) && !real_isinf (ra3))
12499 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12500 int inexact;
12501 mpfr_t m1, m2, m3;
12503 mpfr_inits2 (prec, m1, m2, m3, NULL);
12504 mpfr_from_real (m1, ra1, GMP_RNDN);
12505 mpfr_from_real (m2, ra2, GMP_RNDN);
12506 mpfr_from_real (m3, ra3, GMP_RNDN);
12507 mpfr_clear_flags ();
12508 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12509 result = do_mpfr_ckconv (m1, type, inexact);
12510 mpfr_clears (m1, m2, m3, NULL);
12514 return result;
12517 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12518 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12519 If ARG_SINP and ARG_COSP are NULL then the result is returned
12520 as a complex value.
12521 The type is taken from the type of ARG and is used for setting the
12522 precision of the calculation and results. */
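/* Illustrative sketch (assumed caller behavior): for a constant argument,

     sincos (1.0, &s, &c)

   can be folded into the assignments "s = sin (1.0), c = cos (1.0)" with
   both values computed by mpfr_sin_cos, while __builtin_cexpi (1.0)
   (ARG_SINP == ARG_COSP == NULL) folds to the complex constant
   cos (1.0) + sin (1.0) * I.  */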
12524 static tree
12525 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12527 tree const type = TREE_TYPE (arg);
12528 tree result = NULL_TREE;
12530 STRIP_NOPS (arg);
12532 /* To proceed, MPFR must exactly represent the target floating point
12533 format, which only happens when the target base equals two. */
12534 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12535 && TREE_CODE (arg) == REAL_CST
12536 && !TREE_OVERFLOW (arg))
12538 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12540 if (!real_isnan (ra) && !real_isinf (ra))
12542 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12543 tree result_s, result_c;
12544 int inexact;
12545 mpfr_t m, ms, mc;
12547 mpfr_inits2 (prec, m, ms, mc, NULL);
12548 mpfr_from_real (m, ra, GMP_RNDN);
12549 mpfr_clear_flags ();
12550 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12551 result_s = do_mpfr_ckconv (ms, type, inexact);
12552 result_c = do_mpfr_ckconv (mc, type, inexact);
12553 mpfr_clears (m, ms, mc, NULL);
12554 if (result_s && result_c)
12556 /* If we are to return in a complex value do so. */
12557 if (!arg_sinp && !arg_cosp)
12558 return build_complex (build_complex_type (type),
12559 result_c, result_s);
12561 /* Dereference the sin/cos pointer arguments. */
12562 arg_sinp = build_fold_indirect_ref (arg_sinp);
12563 arg_cosp = build_fold_indirect_ref (arg_cosp);
12564 /* Proceed iff valid pointer types were passed in.  */
12565 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12566 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12568 /* Set the values. */
12569 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12570 result_s);
12571 TREE_SIDE_EFFECTS (result_s) = 1;
12572 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12573 result_c);
12574 TREE_SIDE_EFFECTS (result_c) = 1;
12575 /* Combine the assignments into a compound expr. */
12576 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12577 result_s, result_c));
12582 return result;
12585 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12586 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12587 two-argument mpfr order N Bessel function FUNC on them and return
12588 the resulting value as a tree with type TYPE. The mpfr precision
12589 is set to the precision of TYPE. We assume that function FUNC
12590 returns zero if the result could be calculated exactly within the
12591 requested precision. */
12592 static tree
12593 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12594 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12595 const REAL_VALUE_TYPE *min, bool inclusive)
12597 tree result = NULL_TREE;
12599 STRIP_NOPS (arg1);
12600 STRIP_NOPS (arg2);
12602 /* To proceed, MPFR must exactly represent the target floating point
12603 format, which only happens when the target base equals two. */
12604 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12605 && host_integerp (arg1, 0)
12606 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12608 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12609 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12611 if (n == (long)n
12612 && !real_isnan (ra) && !real_isinf (ra)
12613 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12615 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12616 int inexact;
12617 mpfr_t m;
12619 mpfr_init2 (m, prec);
12620 mpfr_from_real (m, ra, GMP_RNDN);
12621 mpfr_clear_flags ();
12622 inexact = func (m, n, m, GMP_RNDN);
12623 result = do_mpfr_ckconv (m, type, inexact);
12624 mpfr_clear (m);
12628 return result;
12631 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12632 the pointer *(ARG_QUO) and return the result. The type is taken
12633 from the type of ARG0 and is used for setting the precision of the
12634 calculation and results. */
12636 static tree
12637 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12639 tree const type = TREE_TYPE (arg0);
12640 tree result = NULL_TREE;
12642 STRIP_NOPS (arg0);
12643 STRIP_NOPS (arg1);
12645 /* To proceed, MPFR must exactly represent the target floating point
12646 format, which only happens when the target base equals two. */
12647 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12648 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12649 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12651 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12652 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12654 if (!real_isnan (ra0) && !real_isinf (ra0)
12655 && !real_isnan (ra1) && !real_isinf (ra1))
12657 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12658 tree result_rem;
12659 long integer_quo;
12660 mpfr_t m0, m1;
12662 mpfr_inits2 (prec, m0, m1, NULL);
12663 mpfr_from_real (m0, ra0, GMP_RNDN);
12664 mpfr_from_real (m1, ra1, GMP_RNDN);
12665 mpfr_clear_flags ();
12666 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
12667 /* Remquo is independent of the rounding mode, so pass
12668 inexact=0 to do_mpfr_ckconv(). */
12669 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12670 mpfr_clears (m0, m1, NULL);
12671 if (result_rem)
12673 /* MPFR calculates quo in the host's long so it may
12674 return more bits in quo than the target int can hold
12675 if sizeof(host long) > sizeof(target int). This can
12676 happen even for native compilers in LP64 mode. In
12677 these cases, modulo the quo value with the largest
12678 number that the target int can hold while leaving one
12679 bit for the sign. */
12680 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12681 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12683 /* Dereference the quo pointer argument. */
12684 arg_quo = build_fold_indirect_ref (arg_quo);
12685 /* Proceed iff a valid pointer type was passed in. */
12686 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12688 /* Set the value. */
12689 tree result_quo = fold_build2 (MODIFY_EXPR,
12690 TREE_TYPE (arg_quo), arg_quo,
12691 build_int_cst (NULL, integer_quo));
12692 TREE_SIDE_EFFECTS (result_quo) = 1;
12693 /* Combine the quo assignment with the rem. */
12694 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12695 result_quo, result_rem));
12700 return result;
12703 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12704 resulting value as a tree with type TYPE. The mpfr precision is
12705 set to the precision of TYPE. We assume that this mpfr function
12706 returns zero if the result could be calculated exactly within the
12707 requested precision. In addition, the integer pointer represented
12708 by ARG_SG will be dereferenced and set to the appropriate signgam
12709 (-1,1) value. */
12711 static tree
12712 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12714 tree result = NULL_TREE;
12716 STRIP_NOPS (arg);
12718 /* To proceed, MPFR must exactly represent the target floating point
12719 format, which only happens when the target base equals two. Also
12720 verify ARG is a constant and that ARG_SG is an int pointer. */
12721 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12722 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12723 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12724 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12726 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12728 /* In addition to NaN and Inf, the argument cannot be zero or a
12729 negative integer. */
12730 if (!real_isnan (ra) && !real_isinf (ra)
12731 && ra->cl != rvc_zero
12732 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
12734 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12735 int inexact, sg;
12736 mpfr_t m;
12737 tree result_lg;
12739 mpfr_init2 (m, prec);
12740 mpfr_from_real (m, ra, GMP_RNDN);
12741 mpfr_clear_flags ();
12742 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
12743 result_lg = do_mpfr_ckconv (m, type, inexact);
12744 mpfr_clear (m);
12745 if (result_lg)
12747 tree result_sg;
12749 /* Dereference the arg_sg pointer argument. */
12750 arg_sg = build_fold_indirect_ref (arg_sg);
12751 /* Assign the signgam value into *arg_sg. */
12752 result_sg = fold_build2 (MODIFY_EXPR,
12753 TREE_TYPE (arg_sg), arg_sg,
12754 build_int_cst (NULL, sg));
12755 TREE_SIDE_EFFECTS (result_sg) = 1;
12756 /* Combine the signgam assignment with the lgamma result. */
12757 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12758 result_sg, result_lg));
12763 return result;
12765 #endif