re PR middle-end/32441 (ICE in expand_expr_real_1, at expr.c:7109)
[official-gcc.git] / gcc / builtins.c
blob 45dea02084205b348dafd0e6e062a3cf380f6b34
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "tree-gimple.h"
32 #include "flags.h"
33 #include "regs.h"
34 #include "hard-reg-set.h"
35 #include "except.h"
36 #include "function.h"
37 #include "insn-config.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "predict.h"
46 #include "tm_p.h"
47 #include "target.h"
48 #include "langhooks.h"
49 #include "basic-block.h"
50 #include "tree-mudflap.h"
51 #include "tree-flow.h"
52 #include "value-prof.h"
53 #include "diagnostic.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
57 #endif
59 /* Define the names of the builtin function types and codes. */
60 const char *const built_in_class_names[4]
61 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
63 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
64 const char * built_in_names[(int) END_BUILTINS] =
66 #include "builtins.def"
68 #undef DEF_BUILTIN
70 /* Set up an array of _DECL trees; make sure each element is
71 initialized to NULL_TREE. */
72 tree built_in_decls[(int) END_BUILTINS];
73 /* Declarations used when constructing the builtin implicitly in the compiler.
74 An entry may be NULL_TREE when the implicit declaration is invalid (for
75 instance, the runtime is not required to implement the function call in all cases). */
76 tree implicit_built_in_decls[(int) END_BUILTINS];
78 static const char *c_getstr (tree);
79 static rtx c_readstr (const char *, enum machine_mode);
80 static int target_char_cast (tree, char *);
81 static rtx get_memory_rtx (tree, tree);
82 static int apply_args_size (void);
83 static int apply_result_size (void);
84 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
85 static rtx result_vector (int, rtx);
86 #endif
87 static void expand_builtin_update_setjmp_buf (rtx);
88 static void expand_builtin_prefetch (tree);
89 static rtx expand_builtin_apply_args (void);
90 static rtx expand_builtin_apply_args_1 (void);
91 static rtx expand_builtin_apply (rtx, rtx, rtx);
92 static void expand_builtin_return (rtx);
93 static enum type_class type_to_class (tree);
94 static rtx expand_builtin_classify_type (tree);
95 static void expand_errno_check (tree, rtx);
96 static rtx expand_builtin_mathfn (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
98 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
99 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
100 static rtx expand_builtin_sincos (tree);
101 static rtx expand_builtin_cexpi (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
103 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_args_info (tree);
105 static rtx expand_builtin_next_arg (void);
106 static rtx expand_builtin_va_start (tree);
107 static rtx expand_builtin_va_end (tree);
108 static rtx expand_builtin_va_copy (tree);
109 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
113 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
114 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
121 enum machine_mode, int);
122 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
123 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
124 enum machine_mode, int);
125 static rtx expand_builtin_bcopy (tree, int);
126 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
128 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
129 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
131 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
132 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
133 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_alloca (tree, rtx);
141 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
142 static rtx expand_builtin_frame_address (tree, tree);
143 static rtx expand_builtin_fputs (tree, rtx, bool);
144 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
145 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
146 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
147 static tree stabilize_va_list (tree, int);
148 static rtx expand_builtin_expect (tree, rtx);
149 static tree fold_builtin_constant_p (tree);
150 static tree fold_builtin_expect (tree);
151 static tree fold_builtin_classify_type (tree);
152 static tree fold_builtin_strlen (tree);
153 static tree fold_builtin_inf (tree, int);
154 static tree fold_builtin_nan (tree, tree, int);
155 static tree rewrite_call_expr (tree, int, tree, int, ...);
156 static bool validate_arg (tree, enum tree_code code);
157 static bool integer_valued_real_p (tree);
158 static tree fold_trunc_transparent_mathfn (tree, tree);
159 static bool readonly_data_expr (tree);
160 static rtx expand_builtin_fabs (tree, rtx, rtx);
161 static rtx expand_builtin_signbit (tree, rtx);
162 static tree fold_builtin_sqrt (tree, tree);
163 static tree fold_builtin_cbrt (tree, tree);
164 static tree fold_builtin_pow (tree, tree, tree, tree);
165 static tree fold_builtin_powi (tree, tree, tree, tree);
166 static tree fold_builtin_cos (tree, tree, tree);
167 static tree fold_builtin_cosh (tree, tree, tree);
168 static tree fold_builtin_tan (tree, tree);
169 static tree fold_builtin_trunc (tree, tree);
170 static tree fold_builtin_floor (tree, tree);
171 static tree fold_builtin_ceil (tree, tree);
172 static tree fold_builtin_round (tree, tree);
173 static tree fold_builtin_int_roundingfn (tree, tree);
174 static tree fold_builtin_bitop (tree, tree);
175 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
176 static tree fold_builtin_strchr (tree, tree, tree);
177 static tree fold_builtin_memchr (tree, tree, tree, tree);
178 static tree fold_builtin_memcmp (tree, tree, tree);
179 static tree fold_builtin_strcmp (tree, tree);
180 static tree fold_builtin_strncmp (tree, tree, tree);
181 static tree fold_builtin_signbit (tree, tree);
182 static tree fold_builtin_copysign (tree, tree, tree, tree);
183 static tree fold_builtin_isascii (tree);
184 static tree fold_builtin_toascii (tree);
185 static tree fold_builtin_isdigit (tree);
186 static tree fold_builtin_fabs (tree, tree);
187 static tree fold_builtin_abs (tree, tree);
188 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
189 enum tree_code);
190 static tree fold_builtin_n (tree, tree *, int, bool);
191 static tree fold_builtin_0 (tree, bool);
192 static tree fold_builtin_1 (tree, tree, bool);
193 static tree fold_builtin_2 (tree, tree, tree, bool);
194 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
195 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
196 static tree fold_builtin_varargs (tree, tree, bool);
198 static tree fold_builtin_strpbrk (tree, tree, tree);
199 static tree fold_builtin_strstr (tree, tree, tree);
200 static tree fold_builtin_strrchr (tree, tree, tree);
201 static tree fold_builtin_strcat (tree, tree);
202 static tree fold_builtin_strncat (tree, tree, tree);
203 static tree fold_builtin_strspn (tree, tree);
204 static tree fold_builtin_strcspn (tree, tree);
205 static tree fold_builtin_sprintf (tree, tree, tree, int);
207 static rtx expand_builtin_object_size (tree);
208 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
209 enum built_in_function);
210 static void maybe_emit_chk_warning (tree, enum built_in_function);
211 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
212 static tree fold_builtin_object_size (tree, tree);
213 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
214 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
215 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
216 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
217 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
218 enum built_in_function);
219 static bool init_target_chars (void);
221 static unsigned HOST_WIDE_INT target_newline;
222 static unsigned HOST_WIDE_INT target_percent;
223 static unsigned HOST_WIDE_INT target_c;
224 static unsigned HOST_WIDE_INT target_s;
225 static char target_percent_c[3];
226 static char target_percent_s[3];
227 static char target_percent_s_newline[4];
228 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
229 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
230 static tree do_mpfr_arg2 (tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_arg3 (tree, tree, tree, tree,
233 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
234 static tree do_mpfr_sincos (tree, tree, tree);
235 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
236 static tree do_mpfr_bessel_n (tree, tree, tree,
237 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
238 const REAL_VALUE_TYPE *, bool);
239 static tree do_mpfr_remquo (tree, tree, tree);
240 static tree do_mpfr_lgamma_r (tree, tree, tree);
241 #endif
243 /* Return true if NODE should be considered for inline expansion regardless
244 of the optimization level. This is the case whenever a function is invoked
245 with its "internal" name, which normally contains the prefix "__builtin". */
247 static bool called_as_built_in (tree node)
249 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
250 if (strncmp (name, "__builtin_", 10) == 0)
251 return true;
252 if (strncmp (name, "__sync_", 7) == 0)
253 return true;
254 return false;
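/* For illustration: a call spelled __builtin_memcpy (dst, src, n) or
   __sync_fetch_and_add (&x, 1) matches one of the prefix checks above and
   is therefore considered for expansion even when not optimizing, while a
   plain memcpy (dst, src, n) call is not.  (dst, src, n and x are just
   placeholder names.)  */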
257 /* Return the alignment in bits of EXP, a pointer valued expression.
258 But don't return more than MAX_ALIGN no matter what.
259 The alignment returned is, by default, the alignment of the thing that
260 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
262 Otherwise, look at the expression to see if we can do better, i.e., if the
263 expression is actually pointing at an object whose alignment is tighter. */
265 unsigned int
266 get_pointer_alignment (tree exp, unsigned int max_align)
268 unsigned int align, inner;
270 /* We rely on TER to compute accurate alignment information. */
271 if (!(optimize && flag_tree_ter))
272 return 0;
274 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
275 return 0;
277 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
278 align = MIN (align, max_align);
280 while (1)
282 switch (TREE_CODE (exp))
284 case NOP_EXPR:
285 case CONVERT_EXPR:
286 case NON_LVALUE_EXPR:
287 exp = TREE_OPERAND (exp, 0);
288 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
289 return align;
291 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
292 align = MIN (inner, max_align);
293 break;
295 case POINTER_PLUS_EXPR:
296 /* If sum of pointer + int, restrict our maximum alignment to that
297 imposed by the integer. If not, we can't do any better than
298 ALIGN. */
299 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
300 return align;
302 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
303 & (max_align / BITS_PER_UNIT - 1))
304 != 0)
305 max_align >>= 1;
307 exp = TREE_OPERAND (exp, 0);
308 break;
310 case ADDR_EXPR:
311 /* See what we are pointing at and look at its alignment. */
312 exp = TREE_OPERAND (exp, 0);
313 inner = max_align;
314 if (handled_component_p (exp))
316 HOST_WIDE_INT bitsize, bitpos;
317 tree offset;
318 enum machine_mode mode;
319 int unsignedp, volatilep;
321 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
322 &mode, &unsignedp, &volatilep, true);
323 if (bitpos)
324 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
325 if (offset && TREE_CODE (offset) == PLUS_EXPR
326 && host_integerp (TREE_OPERAND (offset, 1), 1))
328 /* Any overflow in calculating offset_bits won't change
329 the alignment. */
330 unsigned offset_bits
331 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
332 * BITS_PER_UNIT);
334 if (offset_bits)
335 inner = MIN (inner, (offset_bits & -offset_bits));
336 offset = TREE_OPERAND (offset, 0);
338 if (offset && TREE_CODE (offset) == MULT_EXPR
339 && host_integerp (TREE_OPERAND (offset, 1), 1))
341 /* Any overflow in calculating offset_factor won't change
342 the alignment. */
343 unsigned offset_factor
344 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
345 * BITS_PER_UNIT);
347 if (offset_factor)
348 inner = MIN (inner, (offset_factor & -offset_factor));
350 else if (offset)
351 inner = MIN (inner, BITS_PER_UNIT);
353 if (TREE_CODE (exp) == FUNCTION_DECL)
354 align = FUNCTION_BOUNDARY;
355 else if (DECL_P (exp))
356 align = MIN (inner, DECL_ALIGN (exp));
357 #ifdef CONSTANT_ALIGNMENT
358 else if (CONSTANT_CLASS_P (exp))
359 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
360 #endif
361 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
362 || TREE_CODE (exp) == INDIRECT_REF)
363 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
364 else
365 align = MIN (align, inner);
366 return MIN (align, max_align);
368 default:
369 return align;
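/* Illustrative example, assuming optimize && flag_tree_ter as required
   above: given

     static char buf[32] __attribute__ ((aligned (16)));

   get_pointer_alignment (&buf[0], BIGGEST_ALIGNMENT) can return the full
   128 bits from DECL_ALIGN, while get_pointer_alignment (&buf[3], ...)
   is limited by the 3-byte offset (bitpos & -bitpos) to 8 bits.  */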
374 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
375 way, because it could contain a zero byte in the middle.
376 TREE_STRING_LENGTH is the size of the character array, not the string.
378 ONLY_VALUE should be nonzero if the result is not going to be emitted
379 into the instruction stream and zero if it is going to be expanded.
380 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
381 is returned, otherwise NULL, since
382 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
383 evaluate the side-effects.
385 The value returned is of type `ssizetype'.
387 Unfortunately, string_constant can't access the values of const char
388 arrays with initializers, so neither can we here. */
390 tree
391 c_strlen (tree src, int only_value)
393 tree offset_node;
394 HOST_WIDE_INT offset;
395 int max;
396 const char *ptr;
398 STRIP_NOPS (src);
399 if (TREE_CODE (src) == COND_EXPR
400 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
402 tree len1, len2;
404 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
405 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
406 if (tree_int_cst_equal (len1, len2))
407 return len1;
410 if (TREE_CODE (src) == COMPOUND_EXPR
411 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
412 return c_strlen (TREE_OPERAND (src, 1), only_value);
414 src = string_constant (src, &offset_node);
415 if (src == 0)
416 return NULL_TREE;
418 max = TREE_STRING_LENGTH (src) - 1;
419 ptr = TREE_STRING_POINTER (src);
421 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
423 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
424 compute the offset to the following null if we don't know where to
425 start searching for it. */
426 int i;
428 for (i = 0; i < max; i++)
429 if (ptr[i] == 0)
430 return NULL_TREE;
432 /* We don't know the starting offset, but we do know that the string
433 has no internal zero bytes. We can assume that the offset falls
434 within the bounds of the string; otherwise, the programmer deserves
435 what he gets. Subtract the offset from the length of the string,
436 and return that. This would perhaps not be valid if we were dealing
437 with named arrays in addition to literal string constants. */
439 return size_diffop (size_int (max), offset_node);
442 /* We have a known offset into the string. Start searching there for
443 a null character if we can represent it as a single HOST_WIDE_INT. */
444 if (offset_node == 0)
445 offset = 0;
446 else if (! host_integerp (offset_node, 0))
447 offset = -1;
448 else
449 offset = tree_low_cst (offset_node, 0);
451 /* If the offset is known to be out of bounds, warn, and call strlen at
452 runtime. */
453 if (offset < 0 || offset > max)
455 warning (0, "offset outside bounds of constant string");
456 return NULL_TREE;
459 /* Use strlen to search for the first zero byte. Since any strings
460 constructed with build_string will have nulls appended, we win even
461 if we get handed something like (char[4])"abcd".
463 Since OFFSET is our starting index into the string, no further
464 calculation is needed. */
465 return ssize_int (strlen (ptr + offset));
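/* Illustrative examples: c_strlen ("hello", 0) yields ssize_int (5);
   with a non-constant offset I, c_strlen ("hello" + i, 1) still yields the
   conservative size_diffop result computed above, whereas
   c_strlen ("foo\0bar" + i, 1) yields NULL_TREE because of the embedded
   NUL byte.  */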
468 /* Return a char pointer for a C string if it is a string constant
469 or sum of string constant and integer constant. */
471 static const char *
472 c_getstr (tree src)
474 tree offset_node;
476 src = string_constant (src, &offset_node);
477 if (src == 0)
478 return 0;
480 if (offset_node == 0)
481 return TREE_STRING_POINTER (src);
482 else if (!host_integerp (offset_node, 1)
483 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
484 return 0;
486 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
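/* Illustrative examples: c_getstr on the tree for "abcdef" returns a host
   pointer to "abcdef"; on "abcdef" + 2 (constant offset) it returns a
   pointer to "cdef"; a non-constant or out-of-range offset makes it
   return 0.  */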
489 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
490 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
492 static rtx
493 c_readstr (const char *str, enum machine_mode mode)
495 HOST_WIDE_INT c[2];
496 HOST_WIDE_INT ch;
497 unsigned int i, j;
499 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
501 c[0] = 0;
502 c[1] = 0;
503 ch = 1;
504 for (i = 0; i < GET_MODE_SIZE (mode); i++)
506 j = i;
507 if (WORDS_BIG_ENDIAN)
508 j = GET_MODE_SIZE (mode) - i - 1;
509 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
510 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
511 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
512 j *= BITS_PER_UNIT;
513 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
515 if (ch)
516 ch = (unsigned char) str[i];
517 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
519 return immed_double_const (c[0], c[1], mode);
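/* Worked example: reading "abcd" into a 4-byte integer mode yields
   0x61626364 on a big-endian target ('a' in the most significant byte)
   and 0x64636261 on a little-endian target.  Once a NUL is seen, CH
   stays zero, so the remaining target bytes are zero-filled.  */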
522 /* Cast a target constant CST to a target CHAR and, if that value fits into
523 the host char type, return zero and put the value into the variable
524 pointed to by P. */
526 static int
527 target_char_cast (tree cst, char *p)
529 unsigned HOST_WIDE_INT val, hostval;
531 if (!host_integerp (cst, 1)
532 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
533 return 1;
535 val = tree_low_cst (cst, 1);
536 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
537 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
539 hostval = val;
540 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
541 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
543 if (val != hostval)
544 return 1;
546 *p = hostval;
547 return 0;
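/* For example, with 8-bit target and host chars, target_char_cast on the
   constant 0x41 stores 'A' in *P and returns 0; the nonzero failure
   return covers constants that are not non-negative host integers and
   targets whose char values do not fit in a host char.  */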
550 /* Similar to save_expr, but assumes that arbitrary code is not executed
551 in between the multiple evaluations. In particular, we assume that a
552 non-addressable local variable will not be modified. */
554 static tree
555 builtin_save_expr (tree exp)
557 if (TREE_ADDRESSABLE (exp) == 0
558 && (TREE_CODE (exp) == PARM_DECL
559 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
560 return exp;
562 return save_expr (exp);
565 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
566 times to get the address of either a higher stack frame, or a return
567 address located within it (depending on FNDECL_CODE). */
569 static rtx
570 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
572 int i;
574 #ifdef INITIAL_FRAME_ADDRESS_RTX
575 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
576 #else
577 rtx tem;
579 /* For a zero count with __builtin_return_address, we don't care what
580 frame address we return, because target-specific definitions will
581 override us. Therefore frame pointer elimination is OK, and using
582 the soft frame pointer is OK.
584 For a nonzero count, or a zero count with __builtin_frame_address,
585 we require a stable offset from the current frame pointer to the
586 previous one, so we must use the hard frame pointer, and
587 we must disable frame pointer elimination. */
588 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
589 tem = frame_pointer_rtx;
590 else
592 tem = hard_frame_pointer_rtx;
594 /* Tell reload not to eliminate the frame pointer. */
595 current_function_accesses_prior_frames = 1;
597 #endif
599 /* Some machines need special handling before we can access
600 arbitrary frames. For example, on the SPARC, we must first flush
601 all register windows to the stack. */
602 #ifdef SETUP_FRAME_ADDRESSES
603 if (count > 0)
604 SETUP_FRAME_ADDRESSES ();
605 #endif
607 /* On the SPARC, the return address is not in the frame, it is in a
608 register. There is no way to access it off of the current frame
609 pointer, but it can be accessed off the previous frame pointer by
610 reading the value from the register window save area. */
611 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
612 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
613 count--;
614 #endif
616 /* Scan back COUNT frames to the specified frame. */
617 for (i = 0; i < count; i++)
619 /* Assume the dynamic chain pointer is in the word that the
620 frame address points to, unless otherwise specified. */
621 #ifdef DYNAMIC_CHAIN_ADDRESS
622 tem = DYNAMIC_CHAIN_ADDRESS (tem);
623 #endif
624 tem = memory_address (Pmode, tem);
625 tem = gen_frame_mem (Pmode, tem);
626 tem = copy_to_reg (tem);
629 /* For __builtin_frame_address, return what we've got. But, on
630 the SPARC for example, we may have to add a bias. */
631 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
632 #ifdef FRAME_ADDR_RTX
633 return FRAME_ADDR_RTX (tem);
634 #else
635 return tem;
636 #endif
638 /* For __builtin_return_address, get the return address from that frame. */
639 #ifdef RETURN_ADDR_RTX
640 tem = RETURN_ADDR_RTX (count, tem);
641 #else
642 tem = memory_address (Pmode,
643 plus_constant (tem, GET_MODE_SIZE (Pmode)));
644 tem = gen_frame_mem (Pmode, tem);
645 #endif
646 return tem;
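/* Source-level illustration: on targets without INITIAL_FRAME_ADDRESS_RTX,
   __builtin_return_address (0) can use the soft frame pointer and so
   still permits frame pointer elimination, while __builtin_frame_address (0)
   or any nonzero count uses the hard frame pointer and sets
   current_function_accesses_prior_frames, as explained above.  */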
649 /* Alias set used for setjmp buffer. */
650 static HOST_WIDE_INT setjmp_alias_set = -1;
652 /* Construct the leading half of a __builtin_setjmp call. Control will
653 return to RECEIVER_LABEL. This is also called directly by the SJLJ
654 exception handling code. */
656 void
657 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
659 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
660 rtx stack_save;
661 rtx mem;
663 if (setjmp_alias_set == -1)
664 setjmp_alias_set = new_alias_set ();
666 buf_addr = convert_memory_address (Pmode, buf_addr);
668 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
670 /* We store the frame pointer and the address of receiver_label in
671 the buffer and use the rest of it for the stack save area, which
672 is machine-dependent. */
674 mem = gen_rtx_MEM (Pmode, buf_addr);
675 set_mem_alias_set (mem, setjmp_alias_set);
676 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
678 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
679 set_mem_alias_set (mem, setjmp_alias_set);
681 emit_move_insn (validize_mem (mem),
682 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
684 stack_save = gen_rtx_MEM (sa_mode,
685 plus_constant (buf_addr,
686 2 * GET_MODE_SIZE (Pmode)));
687 set_mem_alias_set (stack_save, setjmp_alias_set);
688 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
690 /* If there is further processing to do, do it. */
691 #ifdef HAVE_builtin_setjmp_setup
692 if (HAVE_builtin_setjmp_setup)
693 emit_insn (gen_builtin_setjmp_setup (buf_addr));
694 #endif
696 /* Tell optimize_save_area_alloca that extra work is going to
697 need to go on during alloca. */
698 current_function_calls_setjmp = 1;
700 /* We have a nonlocal label. */
701 current_function_has_nonlocal_label = 1;
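/* The buffer layout written by the code above is:
     word 0            frame pointer value (Pmode)
     word 1            address of RECEIVER_LABEL (Pmode)
     words 2 and up    machine-dependent stack save area (STACK_SAVEAREA_MODE)
   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below rely
   on the same layout.  */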
704 /* Construct the trailing part of a __builtin_setjmp call. This is
705 also called directly by the SJLJ exception handling code. */
707 void
708 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
710 /* Clobber the FP when we get here, so we have to make sure it's
711 marked as used by this function. */
712 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
714 /* Mark the static chain as clobbered here so life information
715 doesn't get messed up for it. */
716 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
718 /* Now put in the code to restore the frame pointer, and argument
719 pointer, if needed. */
720 #ifdef HAVE_nonlocal_goto
721 if (! HAVE_nonlocal_goto)
722 #endif
724 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
725 /* This might change the hard frame pointer in ways that aren't
726 apparent to early optimization passes, so force a clobber. */
727 emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
730 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
731 if (fixed_regs[ARG_POINTER_REGNUM])
733 #ifdef ELIMINABLE_REGS
734 size_t i;
735 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
737 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
738 if (elim_regs[i].from == ARG_POINTER_REGNUM
739 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
740 break;
742 if (i == ARRAY_SIZE (elim_regs))
743 #endif
745 /* Now restore our arg pointer from the address at which it
746 was saved in our stack frame. */
747 emit_move_insn (virtual_incoming_args_rtx,
748 copy_to_reg (get_arg_pointer_save_area (cfun)));
751 #endif
753 #ifdef HAVE_builtin_setjmp_receiver
754 if (HAVE_builtin_setjmp_receiver)
755 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
756 else
757 #endif
758 #ifdef HAVE_nonlocal_goto_receiver
759 if (HAVE_nonlocal_goto_receiver)
760 emit_insn (gen_nonlocal_goto_receiver ());
761 else
762 #endif
763 { /* Nothing */ }
765 /* We must not allow the code we just generated to be reordered by
766 scheduling. Specifically, the update of the frame pointer must
767 happen immediately, not later. */
768 emit_insn (gen_blockage ());
771 /* __builtin_longjmp is passed a pointer to an array of five words (not
772 all will be used on all machines). It operates similarly to the C
773 library function of the same name, but is more efficient. Much of
774 the code below is copied from the handling of non-local gotos. */
776 static void
777 expand_builtin_longjmp (rtx buf_addr, rtx value)
779 rtx fp, lab, stack, insn, last;
780 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
782 if (setjmp_alias_set == -1)
783 setjmp_alias_set = new_alias_set ();
785 buf_addr = convert_memory_address (Pmode, buf_addr);
787 buf_addr = force_reg (Pmode, buf_addr);
789 /* We used to store value in static_chain_rtx, but that fails if pointers
790 are smaller than integers. We instead require that the user must pass
791 a second argument of 1, because that is what builtin_setjmp will
792 return. This also makes EH slightly more efficient, since we are no
793 longer copying around a value that we don't care about. */
794 gcc_assert (value == const1_rtx);
796 last = get_last_insn ();
797 #ifdef HAVE_builtin_longjmp
798 if (HAVE_builtin_longjmp)
799 emit_insn (gen_builtin_longjmp (buf_addr));
800 else
801 #endif
803 fp = gen_rtx_MEM (Pmode, buf_addr);
804 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
805 GET_MODE_SIZE (Pmode)));
807 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
808 2 * GET_MODE_SIZE (Pmode)));
809 set_mem_alias_set (fp, setjmp_alias_set);
810 set_mem_alias_set (lab, setjmp_alias_set);
811 set_mem_alias_set (stack, setjmp_alias_set);
813 /* Pick up FP, label, and SP from the block and jump. This code is
814 from expand_goto in stmt.c; see there for detailed comments. */
815 #ifdef HAVE_nonlocal_goto
816 if (HAVE_nonlocal_goto)
817 /* We have to pass a value to the nonlocal_goto pattern that will
818 get copied into the static_chain pointer, but it does not matter
819 what that value is, because builtin_setjmp does not use it. */
820 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
821 else
822 #endif
824 lab = copy_to_reg (lab);
826 emit_insn (gen_rtx_CLOBBER (VOIDmode,
827 gen_rtx_MEM (BLKmode,
828 gen_rtx_SCRATCH (VOIDmode))));
829 emit_insn (gen_rtx_CLOBBER (VOIDmode,
830 gen_rtx_MEM (BLKmode,
831 hard_frame_pointer_rtx)));
833 emit_move_insn (hard_frame_pointer_rtx, fp);
834 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
836 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
837 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
838 emit_indirect_jump (lab);
842 /* Search backwards and mark the jump insn as a non-local goto.
843 Note that this precludes the use of __builtin_longjmp to a
844 __builtin_setjmp target in the same function. However, we've
845 already cautioned the user that these functions are for
846 internal exception handling use only. */
847 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
849 gcc_assert (insn != last);
851 if (JUMP_P (insn))
853 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
854 REG_NOTES (insn));
855 break;
857 else if (CALL_P (insn))
858 break;
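/* Usage note (illustrative): the only accepted call form is
   __builtin_longjmp (buf, 1); the gcc_assert above enforces the constant 1,
   which is the value __builtin_setjmp returns on the longjmp path.  */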
862 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
863 and the address of the save area. */
865 static rtx
866 expand_builtin_nonlocal_goto (tree exp)
868 tree t_label, t_save_area;
869 rtx r_label, r_save_area, r_fp, r_sp, insn;
871 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
872 return NULL_RTX;
874 t_label = CALL_EXPR_ARG (exp, 0);
875 t_save_area = CALL_EXPR_ARG (exp, 1);
877 r_label = expand_normal (t_label);
878 r_label = convert_memory_address (Pmode, r_label);
879 r_save_area = expand_normal (t_save_area);
880 r_save_area = convert_memory_address (Pmode, r_save_area);
881 r_fp = gen_rtx_MEM (Pmode, r_save_area);
882 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
883 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
885 current_function_has_nonlocal_goto = 1;
887 #ifdef HAVE_nonlocal_goto
888 /* ??? We no longer need to pass the static chain value, afaik. */
889 if (HAVE_nonlocal_goto)
890 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
891 else
892 #endif
894 r_label = copy_to_reg (r_label);
896 emit_insn (gen_rtx_CLOBBER (VOIDmode,
897 gen_rtx_MEM (BLKmode,
898 gen_rtx_SCRATCH (VOIDmode))));
900 emit_insn (gen_rtx_CLOBBER (VOIDmode,
901 gen_rtx_MEM (BLKmode,
902 hard_frame_pointer_rtx)));
904 /* Restore frame pointer for containing function.
905 This sets the actual hard register used for the frame pointer
906 to the location of the function's incoming static chain info.
907 The non-local goto handler will then adjust it to contain the
908 proper value and reload the argument pointer, if needed. */
909 emit_move_insn (hard_frame_pointer_rtx, r_fp);
910 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
912 /* USE of hard_frame_pointer_rtx added for consistency;
913 not clear if really needed. */
914 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
915 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
916 emit_indirect_jump (r_label);
919 /* Search backwards to the jump insn and mark it as a
920 non-local goto. */
921 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
923 if (JUMP_P (insn))
925 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
926 const0_rtx, REG_NOTES (insn));
927 break;
929 else if (CALL_P (insn))
930 break;
933 return const0_rtx;
936 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
937 (not all will be used on all machines) that was passed to __builtin_setjmp.
938 It updates the stack pointer in that block to correspond to the current
939 stack pointer. */
941 static void
942 expand_builtin_update_setjmp_buf (rtx buf_addr)
944 enum machine_mode sa_mode = Pmode;
945 rtx stack_save;
948 #ifdef HAVE_save_stack_nonlocal
949 if (HAVE_save_stack_nonlocal)
950 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
951 #endif
952 #ifdef STACK_SAVEAREA_MODE
953 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
954 #endif
956 stack_save
957 = gen_rtx_MEM (sa_mode,
958 memory_address
959 (sa_mode,
960 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
962 #ifdef HAVE_setjmp
963 if (HAVE_setjmp)
964 emit_insn (gen_setjmp ());
965 #endif
967 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
970 /* Expand a call to __builtin_prefetch. For a target that does not support
971 data prefetch, evaluate the memory address argument in case it has side
972 effects. */
974 static void
975 expand_builtin_prefetch (tree exp)
977 tree arg0, arg1, arg2;
978 int nargs;
979 rtx op0, op1, op2;
981 if (!validate_arglist (exp, POINTER_TYPE, 0))
982 return;
984 arg0 = CALL_EXPR_ARG (exp, 0);
986 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
987 zero (read) and argument 2 (locality) defaults to 3 (high degree of
988 locality). */
989 nargs = call_expr_nargs (exp);
990 if (nargs > 1)
991 arg1 = CALL_EXPR_ARG (exp, 1);
992 else
993 arg1 = integer_zero_node;
994 if (nargs > 2)
995 arg2 = CALL_EXPR_ARG (exp, 2);
996 else
997 arg2 = build_int_cst (NULL_TREE, 3);
999 /* Argument 0 is an address. */
1000 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1002 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1003 if (TREE_CODE (arg1) != INTEGER_CST)
1005 error ("second argument to %<__builtin_prefetch%> must be a constant");
1006 arg1 = integer_zero_node;
1008 op1 = expand_normal (arg1);
1009 /* Argument 1 must be either zero or one. */
1010 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1012 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1013 " using zero");
1014 op1 = const0_rtx;
1017 /* Argument 2 (locality) must be a compile-time constant int. */
1018 if (TREE_CODE (arg2) != INTEGER_CST)
1020 error ("third argument to %<__builtin_prefetch%> must be a constant");
1021 arg2 = integer_zero_node;
1023 op2 = expand_normal (arg2);
1024 /* Argument 2 must be 0, 1, 2, or 3. */
1025 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1027 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1028 op2 = const0_rtx;
1031 #ifdef HAVE_prefetch
1032 if (HAVE_prefetch)
1034 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1035 (op0,
1036 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1037 || (GET_MODE (op0) != Pmode))
1039 op0 = convert_memory_address (Pmode, op0);
1040 op0 = force_reg (Pmode, op0);
1042 emit_insn (gen_prefetch (op0, op1, op2));
1044 #endif
1046 /* Don't do anything with direct references to volatile memory, but
1047 generate code to handle other side effects. */
1048 if (!MEM_P (op0) && side_effects_p (op0))
1049 emit_insn (op0);
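/* Source-level illustration of the argument handling above:

     __builtin_prefetch (p);         rw defaults to 0 (read), locality to 3
     __builtin_prefetch (p, 1, 0);   prefetch for write, no temporal locality

   Non-constant arguments are errors; out-of-range values are warned about
   and replaced with zero before expansion.  */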
1052 /* Get a MEM rtx for expression EXP which is the address of an operand
1053 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1054 the maximum length of the block of memory that might be accessed or
1055 NULL if unknown. */
1057 static rtx
1058 get_memory_rtx (tree exp, tree len)
1060 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1061 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1063 /* Get an expression we can use to find the attributes to assign to MEM.
1064 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1065 we can. First remove any nops. */
1066 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1067 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1068 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1069 exp = TREE_OPERAND (exp, 0);
1071 if (TREE_CODE (exp) == ADDR_EXPR)
1072 exp = TREE_OPERAND (exp, 0);
1073 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1074 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1075 else
1076 exp = NULL;
1078 /* Honor attributes derived from exp, except for the alias set
1079 (as builtin stringops may alias with anything) and the size
1080 (as stringops may access multiple array elements). */
1081 if (exp)
1083 set_mem_attributes (mem, exp, 0);
1085 /* Allow the string and memory builtins to overflow from one
1086 field into another, see http://gcc.gnu.org/PR23561.
1087 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1088 memory accessed by the string or memory builtin will fit
1089 within the field. */
1090 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1092 tree mem_expr = MEM_EXPR (mem);
1093 HOST_WIDE_INT offset = -1, length = -1;
1094 tree inner = exp;
1096 while (TREE_CODE (inner) == ARRAY_REF
1097 || TREE_CODE (inner) == NOP_EXPR
1098 || TREE_CODE (inner) == CONVERT_EXPR
1099 || TREE_CODE (inner) == NON_LVALUE_EXPR
1100 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1101 || TREE_CODE (inner) == SAVE_EXPR)
1102 inner = TREE_OPERAND (inner, 0);
1104 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1106 if (MEM_OFFSET (mem)
1107 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1108 offset = INTVAL (MEM_OFFSET (mem));
1110 if (offset >= 0 && len && host_integerp (len, 0))
1111 length = tree_low_cst (len, 0);
1113 while (TREE_CODE (inner) == COMPONENT_REF)
1115 tree field = TREE_OPERAND (inner, 1);
1116 gcc_assert (! DECL_BIT_FIELD (field));
1117 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1118 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1120 if (length >= 0
1121 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1122 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1124 HOST_WIDE_INT size
1125 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1126 /* If we can prove the memory starting at XEXP (mem, 0)
1127 and ending at XEXP (mem, 0) + LENGTH will fit into
1128 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1129 if (offset <= size
1130 && length <= size
1131 && offset + length <= size)
1132 break;
1135 if (offset >= 0
1136 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1137 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1138 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1139 / BITS_PER_UNIT;
1140 else
1142 offset = -1;
1143 length = -1;
1146 mem_expr = TREE_OPERAND (mem_expr, 0);
1147 inner = TREE_OPERAND (inner, 0);
1150 if (mem_expr == NULL)
1151 offset = -1;
1152 if (mem_expr != MEM_EXPR (mem))
1154 set_mem_expr (mem, mem_expr);
1155 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1158 set_mem_alias_set (mem, 0);
1159 set_mem_size (mem, NULL_RTX);
1162 return mem;
1165 /* Built-in functions to perform an untyped call and return. */
1167 /* For each register that may be used for calling a function, this
1168 gives a mode used to copy the register's value. VOIDmode indicates
1169 the register is not used for calling a function. If the machine
1170 has register windows, this gives only the outbound registers.
1171 INCOMING_REGNO gives the corresponding inbound register. */
1172 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1174 /* For each register that may be used for returning values, this gives
1175 a mode used to copy the register's value. VOIDmode indicates the
1176 register is not used for returning values. If the machine has
1177 register windows, this gives only the outbound registers.
1178 INCOMING_REGNO gives the corresponding inbound register. */
1179 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1181 /* For each register that may be used for calling a function, this
1182 gives the offset of that register into the block returned by
1183 __builtin_apply_args. 0 indicates that the register is not
1184 used for calling a function. */
1185 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1187 /* Return the size required for the block returned by __builtin_apply_args,
1188 and initialize apply_args_mode. */
1190 static int
1191 apply_args_size (void)
1193 static int size = -1;
1194 int align;
1195 unsigned int regno;
1196 enum machine_mode mode;
1198 /* The values computed by this function never change. */
1199 if (size < 0)
1201 /* The first value is the incoming arg-pointer. */
1202 size = GET_MODE_SIZE (Pmode);
1204 /* The second value is the structure value address unless this is
1205 passed as an "invisible" first argument. */
1206 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1207 size += GET_MODE_SIZE (Pmode);
1209 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1210 if (FUNCTION_ARG_REGNO_P (regno))
1212 mode = reg_raw_mode[regno];
1214 gcc_assert (mode != VOIDmode);
1216 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1217 if (size % align != 0)
1218 size = CEIL (size, align) * align;
1219 apply_args_reg_offset[regno] = size;
1220 size += GET_MODE_SIZE (mode);
1221 apply_args_mode[regno] = mode;
1223 else
1225 apply_args_mode[regno] = VOIDmode;
1226 apply_args_reg_offset[regno] = 0;
1229 return size;
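/* The block sized here is laid out as:
     [incoming arg pointer (Pmode)]
     [structure value address (Pmode), unless passed as an invisible argument]
     [one slot per FUNCTION_ARG_REGNO_P register, aligned to its mode]
   with apply_args_reg_offset[] recording where each register lands.  */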
1232 /* Return the size required for the block returned by __builtin_apply,
1233 and initialize apply_result_mode. */
1235 static int
1236 apply_result_size (void)
1238 static int size = -1;
1239 int align, regno;
1240 enum machine_mode mode;
1242 /* The values computed by this function never change. */
1243 if (size < 0)
1245 size = 0;
1247 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1248 if (FUNCTION_VALUE_REGNO_P (regno))
1250 mode = reg_raw_mode[regno];
1252 gcc_assert (mode != VOIDmode);
1254 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1255 if (size % align != 0)
1256 size = CEIL (size, align) * align;
1257 size += GET_MODE_SIZE (mode);
1258 apply_result_mode[regno] = mode;
1260 else
1261 apply_result_mode[regno] = VOIDmode;
1263 /* Allow targets that use untyped_call and untyped_return to override
1264 the size so that machine-specific information can be stored here. */
1265 #ifdef APPLY_RESULT_SIZE
1266 size = APPLY_RESULT_SIZE;
1267 #endif
1269 return size;
1272 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1273 /* Create a vector describing the result block RESULT. If SAVEP is true,
1274 the result block is used to save the values; otherwise it is used to
1275 restore the values. */
1277 static rtx
1278 result_vector (int savep, rtx result)
1280 int regno, size, align, nelts;
1281 enum machine_mode mode;
1282 rtx reg, mem;
1283 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1285 size = nelts = 0;
1286 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1287 if ((mode = apply_result_mode[regno]) != VOIDmode)
1289 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1290 if (size % align != 0)
1291 size = CEIL (size, align) * align;
1292 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1293 mem = adjust_address (result, mode, size);
1294 savevec[nelts++] = (savep
1295 ? gen_rtx_SET (VOIDmode, mem, reg)
1296 : gen_rtx_SET (VOIDmode, reg, mem));
1297 size += GET_MODE_SIZE (mode);
1299 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1301 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1303 /* Save the state required to perform an untyped call with the same
1304 arguments as were passed to the current function. */
1306 static rtx
1307 expand_builtin_apply_args_1 (void)
1309 rtx registers, tem;
1310 int size, align, regno;
1311 enum machine_mode mode;
1312 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1314 /* Create a block where the arg-pointer, structure value address,
1315 and argument registers can be saved. */
1316 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1318 /* Walk past the arg-pointer and structure value address. */
1319 size = GET_MODE_SIZE (Pmode);
1320 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1321 size += GET_MODE_SIZE (Pmode);
1323 /* Save each register used in calling a function to the block. */
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if ((mode = apply_args_mode[regno]) != VOIDmode)
1327 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1328 if (size % align != 0)
1329 size = CEIL (size, align) * align;
1331 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1333 emit_move_insn (adjust_address (registers, mode, size), tem);
1334 size += GET_MODE_SIZE (mode);
1337 /* Save the arg pointer to the block. */
1338 tem = copy_to_reg (virtual_incoming_args_rtx);
1339 #ifdef STACK_GROWS_DOWNWARD
1340 /* We need the pointer as the caller actually passed them to us, not
1341 as we might have pretended they were passed. Make sure it's a valid
1342 operand, as emit_move_insn isn't expected to handle a PLUS. */
1343 tem
1344 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1345 NULL_RTX);
1346 #endif
1347 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1349 size = GET_MODE_SIZE (Pmode);
1351 /* Save the structure value address unless this is passed as an
1352 "invisible" first argument. */
1353 if (struct_incoming_value)
1355 emit_move_insn (adjust_address (registers, Pmode, size),
1356 copy_to_reg (struct_incoming_value));
1357 size += GET_MODE_SIZE (Pmode);
1360 /* Return the address of the block. */
1361 return copy_addr_to_reg (XEXP (registers, 0));
1364 /* __builtin_apply_args returns a block of memory allocated on
1365 the stack into which are stored the arg pointer, structure
1366 value address, static chain, and all the registers that might
1367 possibly be used in performing a function call. The code is
1368 moved to the start of the function so the incoming values are
1369 saved. */
1371 static rtx
1372 expand_builtin_apply_args (void)
1374 /* Don't do __builtin_apply_args more than once in a function.
1375 Save the result of the first call and reuse it. */
1376 if (apply_args_value != 0)
1377 return apply_args_value;
1379 /* When this function is called, it means that registers must be
1380 saved on entry to this function. So we migrate the
1381 call to the first insn of this function. */
1382 rtx temp;
1383 rtx seq;
1385 start_sequence ();
1386 temp = expand_builtin_apply_args_1 ();
1387 seq = get_insns ();
1388 end_sequence ();
1390 apply_args_value = temp;
1392 /* Put the insns after the NOTE that starts the function.
1393 If this is inside a start_sequence, make the outer-level insn
1394 chain current, so the code is placed at the start of the
1395 function. */
1396 push_topmost_sequence ();
1397 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1398 pop_topmost_sequence ();
1399 return temp;
1403 /* Perform an untyped call and save the state required to perform an
1404 untyped return of whatever value was returned by the given function. */
1406 static rtx
1407 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1409 int size, align, regno;
1410 enum machine_mode mode;
1411 rtx incoming_args, result, reg, dest, src, call_insn;
1412 rtx old_stack_level = 0;
1413 rtx call_fusage = 0;
1414 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1416 arguments = convert_memory_address (Pmode, arguments);
1418 /* Create a block where the return registers can be saved. */
1419 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1421 /* Fetch the arg pointer from the ARGUMENTS block. */
1422 incoming_args = gen_reg_rtx (Pmode);
1423 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1424 #ifndef STACK_GROWS_DOWNWARD
1425 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1426 incoming_args, 0, OPTAB_LIB_WIDEN);
1427 #endif
1429 /* Push a new argument block and copy the arguments. Do not allow
1430 the (potential) memcpy call below to interfere with our stack
1431 manipulations. */
1432 do_pending_stack_adjust ();
1433 NO_DEFER_POP;
1435 /* Save the stack with nonlocal if available. */
1436 #ifdef HAVE_save_stack_nonlocal
1437 if (HAVE_save_stack_nonlocal)
1438 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1439 else
1440 #endif
1441 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1443 /* Allocate a block of memory onto the stack and copy the memory
1444 arguments to the outgoing arguments address. */
1445 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1446 dest = virtual_outgoing_args_rtx;
1447 #ifndef STACK_GROWS_DOWNWARD
1448 if (GET_CODE (argsize) == CONST_INT)
1449 dest = plus_constant (dest, -INTVAL (argsize));
1450 else
1451 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1452 #endif
1453 dest = gen_rtx_MEM (BLKmode, dest);
1454 set_mem_align (dest, PARM_BOUNDARY);
1455 src = gen_rtx_MEM (BLKmode, incoming_args);
1456 set_mem_align (src, PARM_BOUNDARY);
1457 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1459 /* Refer to the argument block. */
1460 apply_args_size ();
1461 arguments = gen_rtx_MEM (BLKmode, arguments);
1462 set_mem_align (arguments, PARM_BOUNDARY);
1464 /* Walk past the arg-pointer and structure value address. */
1465 size = GET_MODE_SIZE (Pmode);
1466 if (struct_value)
1467 size += GET_MODE_SIZE (Pmode);
1469 /* Restore each of the registers previously saved. Make USE insns
1470 for each of these registers for use in making the call. */
1471 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1472 if ((mode = apply_args_mode[regno]) != VOIDmode)
1474 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1475 if (size % align != 0)
1476 size = CEIL (size, align) * align;
1477 reg = gen_rtx_REG (mode, regno);
1478 emit_move_insn (reg, adjust_address (arguments, mode, size));
1479 use_reg (&call_fusage, reg);
1480 size += GET_MODE_SIZE (mode);
1483 /* Restore the structure value address unless this is passed as an
1484 "invisible" first argument. */
1485 size = GET_MODE_SIZE (Pmode);
1486 if (struct_value)
1488 rtx value = gen_reg_rtx (Pmode);
1489 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1490 emit_move_insn (struct_value, value);
1491 if (REG_P (struct_value))
1492 use_reg (&call_fusage, struct_value);
1493 size += GET_MODE_SIZE (Pmode);
1496 /* All arguments and registers used for the call are set up by now! */
1497 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1499 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1500 and we don't want to load it into a register as an optimization,
1501 because prepare_call_address already did it if it should be done. */
1502 if (GET_CODE (function) != SYMBOL_REF)
1503 function = memory_address (FUNCTION_MODE, function);
1505 /* Generate the actual call instruction and save the return value. */
1506 #ifdef HAVE_untyped_call
1507 if (HAVE_untyped_call)
1508 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1509 result, result_vector (1, result)));
1510 else
1511 #endif
1512 #ifdef HAVE_call_value
1513 if (HAVE_call_value)
1515 rtx valreg = 0;
1517 /* Locate the unique return register. It is not possible to
1518 express a call that sets more than one return register using
1519 call_value; use untyped_call for that. In fact, untyped_call
1520 only needs to save the return registers in the given block. */
1521 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1522 if ((mode = apply_result_mode[regno]) != VOIDmode)
1524 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1526 valreg = gen_rtx_REG (mode, regno);
1529 emit_call_insn (GEN_CALL_VALUE (valreg,
1530 gen_rtx_MEM (FUNCTION_MODE, function),
1531 const0_rtx, NULL_RTX, const0_rtx));
1533 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1535 else
1536 #endif
1537 gcc_unreachable ();
1539 /* Find the CALL insn we just emitted, and attach the register usage
1540 information. */
1541 call_insn = last_call_insn ();
1542 add_function_usage_to (call_insn, call_fusage);
1544 /* Restore the stack. */
1545 #ifdef HAVE_save_stack_nonlocal
1546 if (HAVE_save_stack_nonlocal)
1547 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1548 else
1549 #endif
1550 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1552 OK_DEFER_POP;
1554 /* Return the address of the result block. */
1555 result = copy_addr_to_reg (XEXP (result, 0));
1556 return convert_memory_address (ptr_mode, result);
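/* Typical source-level use of the untyped call machinery (illustrative;
   other_fn is a placeholder and the 64-byte size is only an example that
   must cover the caller's actual argument block):

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) other_fn, args, 64);
     __builtin_return (res);
*/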
1559 /* Perform an untyped return. */
1561 static void
1562 expand_builtin_return (rtx result)
1564 int size, align, regno;
1565 enum machine_mode mode;
1566 rtx reg;
1567 rtx call_fusage = 0;
1569 result = convert_memory_address (Pmode, result);
1571 apply_result_size ();
1572 result = gen_rtx_MEM (BLKmode, result);
1574 #ifdef HAVE_untyped_return
1575 if (HAVE_untyped_return)
1577 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1578 emit_barrier ();
1579 return;
1581 #endif
1583 /* Restore the return value and note that each value is used. */
1584 size = 0;
1585 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1586 if ((mode = apply_result_mode[regno]) != VOIDmode)
1588 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1589 if (size % align != 0)
1590 size = CEIL (size, align) * align;
1591 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1592 emit_move_insn (reg, adjust_address (result, mode, size));
1594 push_to_sequence (call_fusage);
1595 emit_insn (gen_rtx_USE (VOIDmode, reg));
1596 call_fusage = get_insns ();
1597 end_sequence ();
1598 size += GET_MODE_SIZE (mode);
1601 /* Put the USE insns before the return. */
1602 emit_insn (call_fusage);
1604 /* Return whatever values were restored by jumping directly to the end
1605 of the function. */
1606 expand_naked_return ();
1609 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1611 static enum type_class
1612 type_to_class (tree type)
1614 switch (TREE_CODE (type))
1616 case VOID_TYPE: return void_type_class;
1617 case INTEGER_TYPE: return integer_type_class;
1618 case ENUMERAL_TYPE: return enumeral_type_class;
1619 case BOOLEAN_TYPE: return boolean_type_class;
1620 case POINTER_TYPE: return pointer_type_class;
1621 case REFERENCE_TYPE: return reference_type_class;
1622 case OFFSET_TYPE: return offset_type_class;
1623 case REAL_TYPE: return real_type_class;
1624 case COMPLEX_TYPE: return complex_type_class;
1625 case FUNCTION_TYPE: return function_type_class;
1626 case METHOD_TYPE: return method_type_class;
1627 case RECORD_TYPE: return record_type_class;
1628 case UNION_TYPE:
1629 case QUAL_UNION_TYPE: return union_type_class;
1630 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1631 ? string_type_class : array_type_class);
1632 case LANG_TYPE: return lang_type_class;
1633 default: return no_type_class;
1637 /* Expand a call EXP to __builtin_classify_type. */
1639 static rtx
1640 expand_builtin_classify_type (tree exp)
1642 if (call_expr_nargs (exp))
1643 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1644 return GEN_INT (no_type_class);
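/* Illustration: __builtin_classify_type (expr) evaluates to the type_class
   of EXPR's type, e.g. pointer_type_class for a char * argument and
   real_type_class for a double; a call with no argument yields
   no_type_class.  */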
1647 /* This helper macro, meant to be used in mathfn_built_in below,
1648 determines which among a set of three builtin math functions is
1649 appropriate for a given type mode. The `F' and `L' cases are
1650 automatically generated from the `double' case. */
1651 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1652 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1653 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1654 fcodel = BUILT_IN_MATHFN##L ; break;
1655 /* Similar to above, but appends _R after any F/L suffix. */
1656 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1657 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1658 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1659 fcodel = BUILT_IN_MATHFN##L_R ; break;
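/* Example (exposition only): CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so the float and long double variants need not be spelled out by hand.  */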
1661 /* Return the mathematical function equivalent to FN but operating directly
1662 on TYPE, if available. If we can't do the conversion, return NULL_TREE. */
1663 tree
1664 mathfn_built_in (tree type, enum built_in_function fn)
1666 enum built_in_function fcode, fcodef, fcodel;
1668 switch (fn)
1670 CASE_MATHFN (BUILT_IN_ACOS)
1671 CASE_MATHFN (BUILT_IN_ACOSH)
1672 CASE_MATHFN (BUILT_IN_ASIN)
1673 CASE_MATHFN (BUILT_IN_ASINH)
1674 CASE_MATHFN (BUILT_IN_ATAN)
1675 CASE_MATHFN (BUILT_IN_ATAN2)
1676 CASE_MATHFN (BUILT_IN_ATANH)
1677 CASE_MATHFN (BUILT_IN_CBRT)
1678 CASE_MATHFN (BUILT_IN_CEIL)
1679 CASE_MATHFN (BUILT_IN_CEXPI)
1680 CASE_MATHFN (BUILT_IN_COPYSIGN)
1681 CASE_MATHFN (BUILT_IN_COS)
1682 CASE_MATHFN (BUILT_IN_COSH)
1683 CASE_MATHFN (BUILT_IN_DREM)
1684 CASE_MATHFN (BUILT_IN_ERF)
1685 CASE_MATHFN (BUILT_IN_ERFC)
1686 CASE_MATHFN (BUILT_IN_EXP)
1687 CASE_MATHFN (BUILT_IN_EXP10)
1688 CASE_MATHFN (BUILT_IN_EXP2)
1689 CASE_MATHFN (BUILT_IN_EXPM1)
1690 CASE_MATHFN (BUILT_IN_FABS)
1691 CASE_MATHFN (BUILT_IN_FDIM)
1692 CASE_MATHFN (BUILT_IN_FLOOR)
1693 CASE_MATHFN (BUILT_IN_FMA)
1694 CASE_MATHFN (BUILT_IN_FMAX)
1695 CASE_MATHFN (BUILT_IN_FMIN)
1696 CASE_MATHFN (BUILT_IN_FMOD)
1697 CASE_MATHFN (BUILT_IN_FREXP)
1698 CASE_MATHFN (BUILT_IN_GAMMA)
1699 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1700 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1701 CASE_MATHFN (BUILT_IN_HYPOT)
1702 CASE_MATHFN (BUILT_IN_ILOGB)
1703 CASE_MATHFN (BUILT_IN_INF)
1704 CASE_MATHFN (BUILT_IN_ISINF)
1705 CASE_MATHFN (BUILT_IN_J0)
1706 CASE_MATHFN (BUILT_IN_J1)
1707 CASE_MATHFN (BUILT_IN_JN)
1708 CASE_MATHFN (BUILT_IN_LCEIL)
1709 CASE_MATHFN (BUILT_IN_LDEXP)
1710 CASE_MATHFN (BUILT_IN_LFLOOR)
1711 CASE_MATHFN (BUILT_IN_LGAMMA)
1712 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1713 CASE_MATHFN (BUILT_IN_LLCEIL)
1714 CASE_MATHFN (BUILT_IN_LLFLOOR)
1715 CASE_MATHFN (BUILT_IN_LLRINT)
1716 CASE_MATHFN (BUILT_IN_LLROUND)
1717 CASE_MATHFN (BUILT_IN_LOG)
1718 CASE_MATHFN (BUILT_IN_LOG10)
1719 CASE_MATHFN (BUILT_IN_LOG1P)
1720 CASE_MATHFN (BUILT_IN_LOG2)
1721 CASE_MATHFN (BUILT_IN_LOGB)
1722 CASE_MATHFN (BUILT_IN_LRINT)
1723 CASE_MATHFN (BUILT_IN_LROUND)
1724 CASE_MATHFN (BUILT_IN_MODF)
1725 CASE_MATHFN (BUILT_IN_NAN)
1726 CASE_MATHFN (BUILT_IN_NANS)
1727 CASE_MATHFN (BUILT_IN_NEARBYINT)
1728 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1729 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1730 CASE_MATHFN (BUILT_IN_POW)
1731 CASE_MATHFN (BUILT_IN_POWI)
1732 CASE_MATHFN (BUILT_IN_POW10)
1733 CASE_MATHFN (BUILT_IN_REMAINDER)
1734 CASE_MATHFN (BUILT_IN_REMQUO)
1735 CASE_MATHFN (BUILT_IN_RINT)
1736 CASE_MATHFN (BUILT_IN_ROUND)
1737 CASE_MATHFN (BUILT_IN_SCALB)
1738 CASE_MATHFN (BUILT_IN_SCALBLN)
1739 CASE_MATHFN (BUILT_IN_SCALBN)
1740 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1741 CASE_MATHFN (BUILT_IN_SIN)
1742 CASE_MATHFN (BUILT_IN_SINCOS)
1743 CASE_MATHFN (BUILT_IN_SINH)
1744 CASE_MATHFN (BUILT_IN_SQRT)
1745 CASE_MATHFN (BUILT_IN_TAN)
1746 CASE_MATHFN (BUILT_IN_TANH)
1747 CASE_MATHFN (BUILT_IN_TGAMMA)
1748 CASE_MATHFN (BUILT_IN_TRUNC)
1749 CASE_MATHFN (BUILT_IN_Y0)
1750 CASE_MATHFN (BUILT_IN_Y1)
1751 CASE_MATHFN (BUILT_IN_YN)
1753 default:
1754 return NULL_TREE;
1757 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1758 return implicit_built_in_decls[fcode];
1759 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1760 return implicit_built_in_decls[fcodef];
1761 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1762 return implicit_built_in_decls[fcodel];
1763 else
1764 return NULL_TREE;
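/* Usage sketch (exposition only, assuming sinf is registered as an
   implicit builtin for the target): mathfn_built_in (float_type_node,
   BUILT_IN_SIN) returns implicit_built_in_decls[BUILT_IN_SINF], i.e. the
   decl for sinf; for a type other than float, double or long double the
   function returns NULL_TREE.  */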
1767 /* If errno must be maintained, expand the RTL to check if the result,
1768 TARGET, of a built-in function call, EXP, is NaN, and if so set
1769 errno to EDOM. */
1771 static void
1772 expand_errno_check (tree exp, rtx target)
1774 rtx lab = gen_label_rtx ();
1776 /* Test the result; if it is NaN, set errno=EDOM because
1777 the argument was not in the domain. */
1778 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1779 0, lab);
1781 #ifdef TARGET_EDOM
1782 /* If this built-in doesn't throw an exception, set errno directly. */
1783 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1785 #ifdef GEN_ERRNO_RTX
1786 rtx errno_rtx = GEN_ERRNO_RTX;
1787 #else
1788 rtx errno_rtx
1789 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1790 #endif
1791 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1792 emit_label (lab);
1793 return;
1795 #endif
1797 /* We can't set errno=EDOM directly; let the library call do it.
1798 Pop the arguments right away in case the call gets deleted. */
1799 NO_DEFER_POP;
1800 expand_call (exp, target, 0);
1801 OK_DEFER_POP;
1802 emit_label (lab);
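/* Minimal C-level sketch of the check emitted above (exposition only;
   RES stands for the value in TARGET): comparing the result with itself
   fails exactly when it is NaN, so errno is stored only on a domain
   error.  */
#if 0
  if (!(res == res))   /* true only when res is NaN */
    errno = EDOM;
#endif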
1805 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1806 Return NULL_RTX if a normal call should be emitted rather than expanding
1807 the function in-line. EXP is the expression that is a call to the builtin
1808 function; if convenient, the result should be placed in TARGET.
1809 SUBTARGET may be used as the target for computing one of EXP's operands. */
1811 static rtx
1812 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1814 optab builtin_optab;
1815 rtx op0, insns, before_call;
1816 tree fndecl = get_callee_fndecl (exp);
1817 enum machine_mode mode;
1818 bool errno_set = false;
1819 tree arg, narg;
1821 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1822 return NULL_RTX;
1824 arg = CALL_EXPR_ARG (exp, 0);
1826 switch (DECL_FUNCTION_CODE (fndecl))
1828 CASE_FLT_FN (BUILT_IN_SQRT):
1829 errno_set = ! tree_expr_nonnegative_p (arg);
1830 builtin_optab = sqrt_optab;
1831 break;
1832 CASE_FLT_FN (BUILT_IN_EXP):
1833 errno_set = true; builtin_optab = exp_optab; break;
1834 CASE_FLT_FN (BUILT_IN_EXP10):
1835 CASE_FLT_FN (BUILT_IN_POW10):
1836 errno_set = true; builtin_optab = exp10_optab; break;
1837 CASE_FLT_FN (BUILT_IN_EXP2):
1838 errno_set = true; builtin_optab = exp2_optab; break;
1839 CASE_FLT_FN (BUILT_IN_EXPM1):
1840 errno_set = true; builtin_optab = expm1_optab; break;
1841 CASE_FLT_FN (BUILT_IN_LOGB):
1842 errno_set = true; builtin_optab = logb_optab; break;
1843 CASE_FLT_FN (BUILT_IN_LOG):
1844 errno_set = true; builtin_optab = log_optab; break;
1845 CASE_FLT_FN (BUILT_IN_LOG10):
1846 errno_set = true; builtin_optab = log10_optab; break;
1847 CASE_FLT_FN (BUILT_IN_LOG2):
1848 errno_set = true; builtin_optab = log2_optab; break;
1849 CASE_FLT_FN (BUILT_IN_LOG1P):
1850 errno_set = true; builtin_optab = log1p_optab; break;
1851 CASE_FLT_FN (BUILT_IN_ASIN):
1852 builtin_optab = asin_optab; break;
1853 CASE_FLT_FN (BUILT_IN_ACOS):
1854 builtin_optab = acos_optab; break;
1855 CASE_FLT_FN (BUILT_IN_TAN):
1856 builtin_optab = tan_optab; break;
1857 CASE_FLT_FN (BUILT_IN_ATAN):
1858 builtin_optab = atan_optab; break;
1859 CASE_FLT_FN (BUILT_IN_FLOOR):
1860 builtin_optab = floor_optab; break;
1861 CASE_FLT_FN (BUILT_IN_CEIL):
1862 builtin_optab = ceil_optab; break;
1863 CASE_FLT_FN (BUILT_IN_TRUNC):
1864 builtin_optab = btrunc_optab; break;
1865 CASE_FLT_FN (BUILT_IN_ROUND):
1866 builtin_optab = round_optab; break;
1867 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1868 builtin_optab = nearbyint_optab;
1869 if (flag_trapping_math)
1870 break;
1871 /* Else fallthrough and expand as rint. */
1872 CASE_FLT_FN (BUILT_IN_RINT):
1873 builtin_optab = rint_optab; break;
1874 default:
1875 gcc_unreachable ();
1878 /* Make a suitable register to place result in. */
1879 mode = TYPE_MODE (TREE_TYPE (exp));
1881 if (! flag_errno_math || ! HONOR_NANS (mode))
1882 errno_set = false;
1884 /* Before working hard, check whether the instruction is available. */
1885 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1887 target = gen_reg_rtx (mode);
1889 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1890 need to expand the argument again. This way, we will not perform
1891 side-effects more than once. */
1892 narg = builtin_save_expr (arg);
1893 if (narg != arg)
1895 arg = narg;
1896 exp = build_call_expr (fndecl, 1, arg);
1899 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1901 start_sequence ();
1903 /* Compute into TARGET.
1904 Set TARGET to wherever the result comes back. */
1905 target = expand_unop (mode, builtin_optab, op0, target, 0);
1907 if (target != 0)
1909 if (errno_set)
1910 expand_errno_check (exp, target);
1912 /* Output the entire sequence. */
1913 insns = get_insns ();
1914 end_sequence ();
1915 emit_insn (insns);
1916 return target;
1919 /* If we were unable to expand via the builtin, stop the sequence
1920 (without outputting the insns) and call the library function
1921 with the stabilized argument list. */
1922 end_sequence ();
1925 before_call = get_last_insn ();
1927 target = expand_call (exp, target, target == const0_rtx);
1929 /* If this is a sqrt operation and we don't care about errno, try to
1930 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1931 This allows the semantics of the libcall to be visible to the RTL
1932 optimizers. */
1933 if (builtin_optab == sqrt_optab && !errno_set)
1935 /* Search backwards through the insns emitted by expand_call looking
1936 for the instruction with the REG_RETVAL note. */
1937 rtx last = get_last_insn ();
1938 while (last != before_call)
1940 if (find_reg_note (last, REG_RETVAL, NULL))
1942 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1943 /* Check that the REG_EQUAL note is an EXPR_LIST with
1944 two elements, i.e. symbol_ref(sqrt) and the operand. */
1945 if (note
1946 && GET_CODE (note) == EXPR_LIST
1947 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1948 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1949 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1951 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1952 /* Check that the operand is a register with the expected mode. */
1953 if (operand
1954 && REG_P (operand)
1955 && GET_MODE (operand) == mode)
1957 /* Replace the REG_EQUAL note with a SQRT rtx. */
1958 rtx equiv = gen_rtx_SQRT (mode, operand);
1959 set_unique_reg_note (last, REG_EQUAL, equiv);
1962 break;
1964 last = PREV_INSN (last);
1968 return target;
1971 /* Expand a call to the builtin binary math functions (pow and atan2).
1972 Return NULL_RTX if a normal call should be emitted rather than expanding the
1973 function in-line. EXP is the expression that is a call to the builtin
1974 function; if convenient, the result should be placed in TARGET.
1975 SUBTARGET may be used as the target for computing one of EXP's
1976 operands. */
1978 static rtx
1979 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1981 optab builtin_optab;
1982 rtx op0, op1, insns;
1983 int op1_type = REAL_TYPE;
1984 tree fndecl = get_callee_fndecl (exp);
1985 tree arg0, arg1, narg;
1986 enum machine_mode mode;
1987 bool errno_set = true;
1988 bool stable = true;
1990 switch (DECL_FUNCTION_CODE (fndecl))
1992 CASE_FLT_FN (BUILT_IN_SCALBN):
1993 CASE_FLT_FN (BUILT_IN_SCALBLN):
1994 CASE_FLT_FN (BUILT_IN_LDEXP):
1995 op1_type = INTEGER_TYPE;
1996 default:
1997 break;
2000 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2001 return NULL_RTX;
2003 arg0 = CALL_EXPR_ARG (exp, 0);
2004 arg1 = CALL_EXPR_ARG (exp, 1);
2006 switch (DECL_FUNCTION_CODE (fndecl))
2008 CASE_FLT_FN (BUILT_IN_POW):
2009 builtin_optab = pow_optab; break;
2010 CASE_FLT_FN (BUILT_IN_ATAN2):
2011 builtin_optab = atan2_optab; break;
2012 CASE_FLT_FN (BUILT_IN_SCALB):
2013 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2014 return 0;
2015 builtin_optab = scalb_optab; break;
2016 CASE_FLT_FN (BUILT_IN_SCALBN):
2017 CASE_FLT_FN (BUILT_IN_SCALBLN):
2018 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2019 return 0;
2020 /* Fall through... */
2021 CASE_FLT_FN (BUILT_IN_LDEXP):
2022 builtin_optab = ldexp_optab; break;
2023 CASE_FLT_FN (BUILT_IN_FMOD):
2024 builtin_optab = fmod_optab; break;
2025 CASE_FLT_FN (BUILT_IN_REMAINDER):
2026 CASE_FLT_FN (BUILT_IN_DREM):
2027 builtin_optab = remainder_optab; break;
2028 default:
2029 gcc_unreachable ();
2032 /* Make a suitable register to place result in. */
2033 mode = TYPE_MODE (TREE_TYPE (exp));
2035 /* Before working hard, check whether the instruction is available. */
2036 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2037 return NULL_RTX;
2039 target = gen_reg_rtx (mode);
2041 if (! flag_errno_math || ! HONOR_NANS (mode))
2042 errno_set = false;
2044 /* Always stabilize the argument list. */
2045 narg = builtin_save_expr (arg1);
2046 if (narg != arg1)
2048 arg1 = narg;
2049 stable = false;
2051 narg = builtin_save_expr (arg0);
2052 if (narg != arg0)
2054 arg0 = narg;
2055 stable = false;
2058 if (! stable)
2059 exp = build_call_expr (fndecl, 2, arg0, arg1);
2061 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2062 op1 = expand_normal (arg1);
2064 start_sequence ();
2066 /* Compute into TARGET.
2067 Set TARGET to wherever the result comes back. */
2068 target = expand_binop (mode, builtin_optab, op0, op1,
2069 target, 0, OPTAB_DIRECT);
2071 /* If we were unable to expand via the builtin, stop the sequence
2072 (without outputting the insns) and call the library function
2073 with the stabilized argument list. */
2074 if (target == 0)
2076 end_sequence ();
2077 return expand_call (exp, target, target == const0_rtx);
2080 if (errno_set)
2081 expand_errno_check (exp, target);
2083 /* Output the entire sequence. */
2084 insns = get_insns ();
2085 end_sequence ();
2086 emit_insn (insns);
2088 return target;
2091 /* Expand a call to the builtin sin and cos math functions.
2092 Return NULL_RTX if a normal call should be emitted rather than expanding the
2093 function in-line. EXP is the expression that is a call to the builtin
2094 function; if convenient, the result should be placed in TARGET.
2095 SUBTARGET may be used as the target for computing one of EXP's
2096 operands. */
2098 static rtx
2099 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2101 optab builtin_optab;
2102 rtx op0, insns;
2103 tree fndecl = get_callee_fndecl (exp);
2104 enum machine_mode mode;
2105 tree arg, narg;
2107 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2108 return NULL_RTX;
2110 arg = CALL_EXPR_ARG (exp, 0);
2112 switch (DECL_FUNCTION_CODE (fndecl))
2114 CASE_FLT_FN (BUILT_IN_SIN):
2115 CASE_FLT_FN (BUILT_IN_COS):
2116 builtin_optab = sincos_optab; break;
2117 default:
2118 gcc_unreachable ();
2121 /* Make a suitable register to place result in. */
2122 mode = TYPE_MODE (TREE_TYPE (exp));
2124 /* Check if the sincos insn is available, otherwise fall back
2125 to the sin or cos insn. */
2126 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2127 switch (DECL_FUNCTION_CODE (fndecl))
2129 CASE_FLT_FN (BUILT_IN_SIN):
2130 builtin_optab = sin_optab; break;
2131 CASE_FLT_FN (BUILT_IN_COS):
2132 builtin_optab = cos_optab; break;
2133 default:
2134 gcc_unreachable ();
2137 /* Before working hard, check whether the instruction is available. */
2138 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2140 target = gen_reg_rtx (mode);
2142 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2143 need to expand the argument again. This way, we will not perform
2144 side-effects more than once. */
2145 narg = save_expr (arg);
2146 if (narg != arg)
2148 arg = narg;
2149 exp = build_call_expr (fndecl, 1, arg);
2152 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2154 start_sequence ();
2156 /* Compute into TARGET.
2157 Set TARGET to wherever the result comes back. */
2158 if (builtin_optab == sincos_optab)
2160 int result;
2162 switch (DECL_FUNCTION_CODE (fndecl))
2164 CASE_FLT_FN (BUILT_IN_SIN):
2165 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2166 break;
2167 CASE_FLT_FN (BUILT_IN_COS):
2168 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2169 break;
2170 default:
2171 gcc_unreachable ();
2173 gcc_assert (result);
2175 else
2177 target = expand_unop (mode, builtin_optab, op0, target, 0);
2180 if (target != 0)
2182 /* Output the entire sequence. */
2183 insns = get_insns ();
2184 end_sequence ();
2185 emit_insn (insns);
2186 return target;
2189 /* If we were unable to expand via the builtin, stop the sequence
2190 (without outputting the insns) and call the library function
2191 with the stabilized argument list. */
2192 end_sequence ();
2195 target = expand_call (exp, target, target == const0_rtx);
2197 return target;
2200 /* Expand a call to one of the builtin math functions that operate on
2201 a floating point argument and output an integer result (ilogb, isinf,
2202 isnan, etc).
2203 Return 0 if a normal call should be emitted rather than expanding the
2204 function in-line. EXP is the expression that is a call to the builtin
2205 function; if convenient, the result should be placed in TARGET.
2206 SUBTARGET may be used as the target for computing one of EXP's operands. */
2208 static rtx
2209 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2211 optab builtin_optab;
2212 enum insn_code icode;
2213 rtx op0;
2214 tree fndecl = get_callee_fndecl (exp);
2215 enum machine_mode mode;
2216 bool errno_set = false;
2217 tree arg, narg;
2219 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2220 return NULL_RTX;
2222 arg = CALL_EXPR_ARG (exp, 0);
2224 switch (DECL_FUNCTION_CODE (fndecl))
2226 CASE_FLT_FN (BUILT_IN_ILOGB):
2227 errno_set = true; builtin_optab = ilogb_optab; break;
2228 CASE_FLT_FN (BUILT_IN_ISINF):
2229 builtin_optab = isinf_optab; break;
2230 default:
2231 gcc_unreachable ();
2234 /* There's no easy way to detect the case where we need to set EDOM. */
2235 if (flag_errno_math && errno_set)
2236 return NULL_RTX;
2238 /* Optab mode depends on the mode of the input argument. */
2239 mode = TYPE_MODE (TREE_TYPE (arg));
2241 icode = builtin_optab->handlers[(int) mode].insn_code;
2243 /* Before working hard, check whether the instruction is available. */
2244 if (icode != CODE_FOR_nothing)
2246 /* Make a suitable register to place result in. */
2247 if (!target
2248 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2249 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2251 gcc_assert (insn_data[icode].operand[0].predicate
2252 (target, GET_MODE (target)));
2254 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2255 need to expand the argument again. This way, we will not perform
2256 side-effects more than once. */
2257 narg = builtin_save_expr (arg);
2258 if (narg != arg)
2260 arg = narg;
2261 exp = build_call_expr (fndecl, 1, arg);
2264 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2266 if (mode != GET_MODE (op0))
2267 op0 = convert_to_mode (mode, op0, 0);
2269 /* Compute into TARGET.
2270 Set TARGET to wherever the result comes back. */
2271 emit_unop_insn (icode, target, op0, UNKNOWN);
2272 return target;
2275 target = expand_call (exp, target, target == const0_rtx);
2277 return target;
2280 /* Expand a call to the builtin sincos math function.
2281 Return NULL_RTX if a normal call should be emitted rather than expanding the
2282 function in-line. EXP is the expression that is a call to the builtin
2283 function. */
2285 static rtx
2286 expand_builtin_sincos (tree exp)
2288 rtx op0, op1, op2, target1, target2;
2289 enum machine_mode mode;
2290 tree arg, sinp, cosp;
2291 int result;
2293 if (!validate_arglist (exp, REAL_TYPE,
2294 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2295 return NULL_RTX;
2297 arg = CALL_EXPR_ARG (exp, 0);
2298 sinp = CALL_EXPR_ARG (exp, 1);
2299 cosp = CALL_EXPR_ARG (exp, 2);
2301 /* Make a suitable register to place result in. */
2302 mode = TYPE_MODE (TREE_TYPE (arg));
2304 /* Check if sincos insn is available, otherwise emit the call. */
2305 if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2306 return NULL_RTX;
2308 target1 = gen_reg_rtx (mode);
2309 target2 = gen_reg_rtx (mode);
2311 op0 = expand_normal (arg);
2312 op1 = expand_normal (build_fold_indirect_ref (sinp));
2313 op2 = expand_normal (build_fold_indirect_ref (cosp));
2315 /* Compute into target1 and target2.
2316 Set TARGET to wherever the result comes back. */
2317 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2318 gcc_assert (result);
2320 /* Move target1 and target2 to the memory locations indicated
2321 by op1 and op2. */
2322 emit_move_insn (op1, target1);
2323 emit_move_insn (op2, target2);
2325 return const0_rtx;
2328 /* Expand a call to the internal cexpi builtin to the sincos math function.
2329 EXP is the expression that is a call to the builtin function; if convenient,
2330 the result should be placed in TARGET. SUBTARGET may be used as the target
2331 for computing one of EXP's operands. */
2333 static rtx
2334 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2336 tree fndecl = get_callee_fndecl (exp);
2337 tree arg, type;
2338 enum machine_mode mode;
2339 rtx op0, op1, op2;
2341 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2342 return NULL_RTX;
2344 arg = CALL_EXPR_ARG (exp, 0);
2345 type = TREE_TYPE (arg);
2346 mode = TYPE_MODE (TREE_TYPE (arg));
2348 /* Try expanding via a sincos optab, fall back to emitting a libcall
2349 to sincos or cexp. We are sure one of them is available because cexpi
2350 is only generated from sincos or cexp, or when we have either of them. */
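/* For exposition: __builtin_cexpi (x) computes exp (i*x), and by Euler's
   formula exp (i*x) == cos (x) + i*sin (x), so either a sincos-style
   expansion or a call to cexp on the purely imaginary argument i*x
   produces the same value.  */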
2351 if (sincos_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2353 op1 = gen_reg_rtx (mode);
2354 op2 = gen_reg_rtx (mode);
2356 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2358 /* Compute into op1 and op2. */
2359 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2361 else if (TARGET_HAS_SINCOS)
2363 tree call, fn = NULL_TREE;
2364 tree top1, top2;
2365 rtx op1a, op2a;
2367 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2368 fn = built_in_decls[BUILT_IN_SINCOSF];
2369 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2370 fn = built_in_decls[BUILT_IN_SINCOS];
2371 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2372 fn = built_in_decls[BUILT_IN_SINCOSL];
2373 else
2374 gcc_unreachable ();
2376 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2377 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2378 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2379 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2380 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2381 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2383 /* Make sure not to fold the sincos call again. */
2384 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2385 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2386 call, 3, arg, top1, top2));
2388 else
2390 tree call, fn = NULL_TREE, narg;
2391 tree ctype = build_complex_type (type);
2393 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2394 fn = built_in_decls[BUILT_IN_CEXPF];
2395 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2396 fn = built_in_decls[BUILT_IN_CEXP];
2397 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2398 fn = built_in_decls[BUILT_IN_CEXPL];
2399 else
2400 gcc_unreachable ();
2402 /* If we don't have a decl for cexp, create one. This is the
2403 friendliest fallback if the user calls __builtin_cexpi
2404 on a target without full C99 function support. */
2405 if (fn == NULL_TREE)
2407 tree fntype;
2408 const char *name = NULL;
2410 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2411 name = "cexpf";
2412 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2413 name = "cexp";
2414 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2415 name = "cexpl";
2417 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2418 fn = build_fn_decl (name, fntype);
2421 narg = fold_build2 (COMPLEX_EXPR, ctype,
2422 build_real (type, dconst0), arg);
2424 /* Make sure not to fold the cexp call again. */
2425 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2426 return expand_expr (build_call_nary (ctype, call, 1, narg),
2427 target, VOIDmode, EXPAND_NORMAL);
2430 /* Now build the proper return type. */
2431 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2432 make_tree (TREE_TYPE (arg), op2),
2433 make_tree (TREE_TYPE (arg), op1)),
2434 target, VOIDmode, EXPAND_NORMAL);
2437 /* Expand a call to one of the builtin rounding functions gcc defines
2438 as an extension (lfloor and lceil). As these are gcc extensions we
2439 do not need to worry about setting errno to EDOM.
2440 If expanding via optab fails, lower expression to (int)(floor(x)).
2441 EXP is the expression that is a call to the builtin function;
2442 if convenient, the result should be placed in TARGET. SUBTARGET may
2443 be used as the target for computing one of EXP's operands. */
2445 static rtx
2446 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2448 convert_optab builtin_optab;
2449 rtx op0, insns, tmp;
2450 tree fndecl = get_callee_fndecl (exp);
2451 enum built_in_function fallback_fn;
2452 tree fallback_fndecl;
2453 enum machine_mode mode;
2454 tree arg, narg;
2456 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2457 gcc_unreachable ();
2459 arg = CALL_EXPR_ARG (exp, 0);
2461 switch (DECL_FUNCTION_CODE (fndecl))
2463 CASE_FLT_FN (BUILT_IN_LCEIL):
2464 CASE_FLT_FN (BUILT_IN_LLCEIL):
2465 builtin_optab = lceil_optab;
2466 fallback_fn = BUILT_IN_CEIL;
2467 break;
2469 CASE_FLT_FN (BUILT_IN_LFLOOR):
2470 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2471 builtin_optab = lfloor_optab;
2472 fallback_fn = BUILT_IN_FLOOR;
2473 break;
2475 default:
2476 gcc_unreachable ();
2479 /* Make a suitable register to place result in. */
2480 mode = TYPE_MODE (TREE_TYPE (exp));
2482 target = gen_reg_rtx (mode);
2484 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2485 need to expand the argument again. This way, we will not perform
2486 side-effects more than once. */
2487 narg = builtin_save_expr (arg);
2488 if (narg != arg)
2490 arg = narg;
2491 exp = build_call_expr (fndecl, 1, arg);
2494 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2496 start_sequence ();
2498 /* Compute into TARGET. */
2499 if (expand_sfix_optab (target, op0, builtin_optab))
2501 /* Output the entire sequence. */
2502 insns = get_insns ();
2503 end_sequence ();
2504 emit_insn (insns);
2505 return target;
2508 /* If we were unable to expand via the builtin, stop the sequence
2509 (without outputting the insns). */
2510 end_sequence ();
2512 /* Fall back to floating point rounding optab. */
2513 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2515 /* For non-C99 targets we may end up without a fallback fndecl here
2516 if the user called __builtin_lfloor directly. In this case emit
2517 a call to the floor/ceil variants nevertheless. This should result
2518 in the best user experience on targets lacking full C99 support. */
2519 if (fallback_fndecl == NULL_TREE)
2521 tree fntype;
2522 const char *name = NULL;
2524 switch (DECL_FUNCTION_CODE (fndecl))
2526 case BUILT_IN_LCEIL:
2527 case BUILT_IN_LLCEIL:
2528 name = "ceil";
2529 break;
2530 case BUILT_IN_LCEILF:
2531 case BUILT_IN_LLCEILF:
2532 name = "ceilf";
2533 break;
2534 case BUILT_IN_LCEILL:
2535 case BUILT_IN_LLCEILL:
2536 name = "ceill";
2537 break;
2538 case BUILT_IN_LFLOOR:
2539 case BUILT_IN_LLFLOOR:
2540 name = "floor";
2541 break;
2542 case BUILT_IN_LFLOORF:
2543 case BUILT_IN_LLFLOORF:
2544 name = "floorf";
2545 break;
2546 case BUILT_IN_LFLOORL:
2547 case BUILT_IN_LLFLOORL:
2548 name = "floorl";
2549 break;
2550 default:
2551 gcc_unreachable ();
2554 fntype = build_function_type_list (TREE_TYPE (arg),
2555 TREE_TYPE (arg), NULL_TREE);
2556 fallback_fndecl = build_fn_decl (name, fntype);
2559 exp = build_call_expr (fallback_fndecl, 1, arg);
2561 tmp = expand_normal (exp);
2563 /* Truncate the result of floating point optab to integer
2564 via expand_fix (). */
2565 target = gen_reg_rtx (mode);
2566 expand_fix (target, tmp, 0);
2568 return target;
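/* Worked example (exposition only): when the lfloor optab has no handler
   for the mode, a call such as __builtin_lfloor (x) is expanded by the
   fallback path above roughly as (long) floor (x), i.e. a call to floor
   followed by an expand_fix conversion to the integer result mode.  */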
2571 /* Expand a call to one of the builtin math functions doing integer
2572 conversion (lrint, llrint, lround and llround).
2573 Return 0 if a normal call should be emitted rather than expanding the
2574 function in-line. EXP is the expression that is a call to the builtin
2575 function; if convenient, the result should be placed in TARGET.
2576 SUBTARGET may be used as the target for computing one of EXP's operands. */
2578 static rtx
2579 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2581 convert_optab builtin_optab;
2582 rtx op0, insns;
2583 tree fndecl = get_callee_fndecl (exp);
2584 tree arg, narg;
2585 enum machine_mode mode;
2587 /* There's no easy way to detect the case where we need to set EDOM. */
2588 if (flag_errno_math)
2589 return NULL_RTX;
2591 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2592 gcc_unreachable ();
2594 arg = CALL_EXPR_ARG (exp, 0);
2596 switch (DECL_FUNCTION_CODE (fndecl))
2598 CASE_FLT_FN (BUILT_IN_LRINT):
2599 CASE_FLT_FN (BUILT_IN_LLRINT):
2600 builtin_optab = lrint_optab; break;
2601 CASE_FLT_FN (BUILT_IN_LROUND):
2602 CASE_FLT_FN (BUILT_IN_LLROUND):
2603 builtin_optab = lround_optab; break;
2604 default:
2605 gcc_unreachable ();
2608 /* Make a suitable register to place result in. */
2609 mode = TYPE_MODE (TREE_TYPE (exp));
2611 target = gen_reg_rtx (mode);
2613 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2614 need to expand the argument again. This way, we will not perform
2615 side-effects more the once. */
2616 narg = builtin_save_expr (arg);
2617 if (narg != arg)
2619 arg = narg;
2620 exp = build_call_expr (fndecl, 1, arg);
2623 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2625 start_sequence ();
2627 if (expand_sfix_optab (target, op0, builtin_optab))
2629 /* Output the entire sequence. */
2630 insns = get_insns ();
2631 end_sequence ();
2632 emit_insn (insns);
2633 return target;
2636 /* If we were unable to expand via the builtin, stop the sequence
2637 (without outputting the insns) and call the library function
2638 with the stabilized argument list. */
2639 end_sequence ();
2641 target = expand_call (exp, target, target == const0_rtx);
2643 return target;
2646 /* To evaluate powi(x,n), the floating point value x raised to the
2647 constant integer exponent n, we use a hybrid algorithm that
2648 combines the "window method" with look-up tables. For an
2649 introduction to exponentiation algorithms and "addition chains",
2650 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2651 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2652 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2653 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2655 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2656 multiplications to inline before calling the system library's pow
2657 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2658 so this default never requires calling pow, powf or powl. */
2660 #ifndef POWI_MAX_MULTS
2661 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2662 #endif
2664 /* The size of the "optimal power tree" lookup table. All
2665 exponents less than this value are simply looked up in the
2666 powi_table below. This threshold is also used to size the
2667 cache of pseudo registers that hold intermediate results. */
2668 #define POWI_TABLE_SIZE 256
2670 /* The size, in bits of the window, used in the "window method"
2671 exponentiation algorithm. This is equivalent to a radix of
2672 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2673 #define POWI_WINDOW_SIZE 3
2675 /* The following table is an efficient representation of an
2676 "optimal power tree". For each value, i, the corresponding
2677 value, j, in the table states that an optimal evaluation
2678 sequence for calculating pow(x,i) can be found by evaluating
2679 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2680 100 integers is given in Knuth's "Seminumerical algorithms". */
2682 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2684 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2685 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2686 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2687 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2688 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2689 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2690 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2691 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2692 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2693 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2694 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2695 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2696 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2697 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2698 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2699 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2700 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2701 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2702 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2703 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2704 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2705 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2706 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2707 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2708 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2709 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2710 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2711 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2712 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2713 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2714 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2715 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
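/* Worked example (an illustrative sketch, not part of the original file):
   powi_table[13] == 10, powi_table[10] == 5, powi_table[5] == 3,
   powi_table[3] == 2 and powi_table[2] == 1, so x**13 needs only five
   multiplications, matching the sequence expand_powi emits for n == 13.  */
#if 0
static double
powi13_example (double x)
{
  double x2  = x * x;      /* x**2                 */
  double x3  = x2 * x;     /* x**3  = x**2 * x     */
  double x5  = x3 * x2;    /* x**5  = x**3 * x**2  */
  double x10 = x5 * x5;    /* x**10 = x**5 * x**5  */
  return x10 * x3;         /* x**13 = x**10 * x**3 */
}
#endif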
2719 /* Return the number of multiplications required to calculate
2720 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2721 subroutine of powi_cost. CACHE is an array indicating
2722 which exponents have already been calculated. */
2724 static int
2725 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2727 /* If we've already calculated this exponent, then this evaluation
2728 doesn't require any additional multiplications. */
2729 if (cache[n])
2730 return 0;
2732 cache[n] = true;
2733 return powi_lookup_cost (n - powi_table[n], cache)
2734 + powi_lookup_cost (powi_table[n], cache) + 1;
2737 /* Return the number of multiplications required to calculate
2738 powi(x,n) for an arbitrary x, given the exponent N. This
2739 function needs to be kept in sync with expand_powi below. */
2741 static int
2742 powi_cost (HOST_WIDE_INT n)
2744 bool cache[POWI_TABLE_SIZE];
2745 unsigned HOST_WIDE_INT digit;
2746 unsigned HOST_WIDE_INT val;
2747 int result;
2749 if (n == 0)
2750 return 0;
2752 /* Ignore the reciprocal when calculating the cost. */
2753 val = (n < 0) ? -n : n;
2755 /* Initialize the exponent cache. */
2756 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2757 cache[1] = true;
2759 result = 0;
2761 while (val >= POWI_TABLE_SIZE)
2763 if (val & 1)
2765 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2766 result += powi_lookup_cost (digit, cache)
2767 + POWI_WINDOW_SIZE + 1;
2768 val >>= POWI_WINDOW_SIZE;
2770 else
2772 val >>= 1;
2773 result++;
2777 return result + powi_lookup_cost (val, cache);
2780 /* Recursive subroutine of expand_powi. This function takes the array,
2781 CACHE, of already calculated exponents and an exponent N and returns
2782 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2784 static rtx
2785 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2787 unsigned HOST_WIDE_INT digit;
2788 rtx target, result;
2789 rtx op0, op1;
2791 if (n < POWI_TABLE_SIZE)
2793 if (cache[n])
2794 return cache[n];
2796 target = gen_reg_rtx (mode);
2797 cache[n] = target;
2799 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2800 op1 = expand_powi_1 (mode, powi_table[n], cache);
2802 else if (n & 1)
2804 target = gen_reg_rtx (mode);
2805 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2806 op0 = expand_powi_1 (mode, n - digit, cache);
2807 op1 = expand_powi_1 (mode, digit, cache);
2809 else
2811 target = gen_reg_rtx (mode);
2812 op0 = expand_powi_1 (mode, n >> 1, cache);
2813 op1 = op0;
2816 result = expand_mult (mode, op0, op1, target, 0);
2817 if (result != target)
2818 emit_move_insn (target, result);
2819 return target;
2822 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2823 floating point operand in mode MODE, and N is the exponent. This
2824 function needs to be kept in sync with powi_cost above. */
2826 static rtx
2827 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2829 unsigned HOST_WIDE_INT val;
2830 rtx cache[POWI_TABLE_SIZE];
2831 rtx result;
2833 if (n == 0)
2834 return CONST1_RTX (mode);
2836 val = (n < 0) ? -n : n;
2838 memset (cache, 0, sizeof (cache));
2839 cache[1] = x;
2841 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2843 /* If the original exponent was negative, reciprocate the result. */
2844 if (n < 0)
2845 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2846 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2848 return result;
2851 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2852 a normal call should be emitted rather than expanding the function
2853 in-line. EXP is the expression that is a call to the builtin
2854 function; if convenient, the result should be placed in TARGET. */
2856 static rtx
2857 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2859 tree arg0, arg1;
2860 tree fn, narg0;
2861 tree type = TREE_TYPE (exp);
2862 REAL_VALUE_TYPE cint, c, c2;
2863 HOST_WIDE_INT n;
2864 rtx op, op2;
2865 enum machine_mode mode = TYPE_MODE (type);
2867 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2868 return NULL_RTX;
2870 arg0 = CALL_EXPR_ARG (exp, 0);
2871 arg1 = CALL_EXPR_ARG (exp, 1);
2873 if (TREE_CODE (arg1) != REAL_CST
2874 || TREE_OVERFLOW (arg1))
2875 return expand_builtin_mathfn_2 (exp, target, subtarget);
2877 /* Handle constant exponents. */
2879 /* For integer valued exponents we can expand to an optimal multiplication
2880 sequence using expand_powi. */
2881 c = TREE_REAL_CST (arg1);
2882 n = real_to_integer (&c);
2883 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2884 if (real_identical (&c, &cint)
2885 && ((n >= -1 && n <= 2)
2886 || (flag_unsafe_math_optimizations
2887 && !optimize_size
2888 && powi_cost (n) <= POWI_MAX_MULTS)))
2890 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2891 if (n != 1)
2893 op = force_reg (mode, op);
2894 op = expand_powi (op, mode, n);
2896 return op;
2899 narg0 = builtin_save_expr (arg0);
2901 /* If the exponent is not integer valued, check if it is half of an integer.
2902 In this case we can expand to sqrt (x) * x**(n/2). */
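/* For example (exposition only): pow (x, 2.5) gives c2 == 5.0, hence
   n == 5, and, provided the cost and unsafe-math guards below allow it,
   the code computes sqrt (x) * x*x, which equals x**2.5.  */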
2903 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2904 if (fn != NULL_TREE)
2906 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2907 n = real_to_integer (&c2);
2908 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2909 if (real_identical (&c2, &cint)
2910 && ((flag_unsafe_math_optimizations
2911 && !optimize_size
2912 && powi_cost (n/2) <= POWI_MAX_MULTS)
2913 || n == 1))
2915 tree call_expr = build_call_expr (fn, 1, narg0);
2916 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2917 if (n != 1)
2919 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2920 op2 = force_reg (mode, op2);
2921 op2 = expand_powi (op2, mode, abs (n / 2));
2922 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2923 0, OPTAB_LIB_WIDEN);
2924 /* If the original exponent was negative, reciprocate the
2925 result. */
2926 if (n < 0)
2927 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2928 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2930 return op;
2934 /* Check whether the exponent is a third of an integer. In this case
2935 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2936 different from pow (x, 1./3.) due to rounding and behavior
2937 with negative x, we need to constrain this transformation to
2938 unsafe math and positive x or finite math. */
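/* For example (exposition only): pow (x, 4./3.) gives n == 4 here, so,
   when the guards are satisfied, the code below computes
   cbrt (x) * x**(n/3) == cbrt (x) * x, which equals x**(4/3).  */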
2939 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2940 if (fn != NULL_TREE
2941 && flag_unsafe_math_optimizations
2942 && (tree_expr_nonnegative_p (arg0)
2943 || !HONOR_NANS (mode)))
2945 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2946 real_round (&c2, mode, &c2);
2947 n = real_to_integer (&c2);
2948 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2949 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2950 real_convert (&c2, mode, &c2);
2951 if (real_identical (&c2, &c)
2952 && ((!optimize_size
2953 && powi_cost (n/3) <= POWI_MAX_MULTS)
2954 || n == 1))
2956 tree call_expr = build_call_expr (fn, 1,narg0);
2957 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2958 if (abs (n) % 3 == 2)
2959 op = expand_simple_binop (mode, MULT, op, op, op,
2960 0, OPTAB_LIB_WIDEN);
2961 if (n != 1)
2963 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2964 op2 = force_reg (mode, op2);
2965 op2 = expand_powi (op2, mode, abs (n / 3));
2966 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2967 0, OPTAB_LIB_WIDEN);
2968 /* If the original exponent was negative, reciprocate the
2969 result. */
2970 if (n < 0)
2971 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2972 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2974 return op;
2978 /* Fall back to optab expansion. */
2979 return expand_builtin_mathfn_2 (exp, target, subtarget);
2982 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2983 a normal call should be emitted rather than expanding the function
2984 in-line. EXP is the expression that is a call to the builtin
2985 function; if convenient, the result should be placed in TARGET. */
2987 static rtx
2988 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
2990 tree arg0, arg1;
2991 rtx op0, op1;
2992 enum machine_mode mode;
2993 enum machine_mode mode2;
2995 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2996 return NULL_RTX;
2998 arg0 = CALL_EXPR_ARG (exp, 0);
2999 arg1 = CALL_EXPR_ARG (exp, 1);
3000 mode = TYPE_MODE (TREE_TYPE (exp));
3002 /* Handle constant power. */
3004 if (TREE_CODE (arg1) == INTEGER_CST
3005 && !TREE_OVERFLOW (arg1))
3007 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3009 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3010 Otherwise, check the number of multiplications required. */
3011 if ((TREE_INT_CST_HIGH (arg1) == 0
3012 || TREE_INT_CST_HIGH (arg1) == -1)
3013 && ((n >= -1 && n <= 2)
3014 || (! optimize_size
3015 && powi_cost (n) <= POWI_MAX_MULTS)))
3017 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3018 op0 = force_reg (mode, op0);
3019 return expand_powi (op0, mode, n);
3023 /* Emit a libcall to libgcc. */
3025 /* Mode of the 2nd argument must match that of an int. */
3026 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3028 if (target == NULL_RTX)
3029 target = gen_reg_rtx (mode);
3031 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3032 if (GET_MODE (op0) != mode)
3033 op0 = convert_to_mode (mode, op0, 0);
3034 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3035 if (GET_MODE (op1) != mode2)
3036 op1 = convert_to_mode (mode2, op1, 0);
3038 target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
3039 target, LCT_CONST_MAKE_BLOCK, mode, 2,
3040 op0, mode, op1, mode2);
3042 return target;
3045 /* Expand expression EXP, which is a call to the strlen builtin. Return
3046 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3047 try to get the result in TARGET, if convenient. */
3049 static rtx
3050 expand_builtin_strlen (tree exp, rtx target,
3051 enum machine_mode target_mode)
3053 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3054 return NULL_RTX;
3055 else
3057 rtx pat;
3058 tree len;
3059 tree src = CALL_EXPR_ARG (exp, 0);
3060 rtx result, src_reg, char_rtx, before_strlen;
3061 enum machine_mode insn_mode = target_mode, char_mode;
3062 enum insn_code icode = CODE_FOR_nothing;
3063 int align;
3065 /* If the length can be computed at compile-time, return it. */
3066 len = c_strlen (src, 0);
3067 if (len)
3068 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3070 /* If the length can be computed at compile-time and is a constant
3071 integer, but there are side-effects in src, evaluate
3072 src for side-effects, then return len.
3073 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3074 can be optimized into: i++; x = 3; */
3075 len = c_strlen (src, 1);
3076 if (len && TREE_CODE (len) == INTEGER_CST)
3078 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3079 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3082 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3084 /* If SRC is not a pointer type, don't do this operation inline. */
3085 if (align == 0)
3086 return NULL_RTX;
3088 /* Bail out if we can't compute strlen in the right mode. */
3089 while (insn_mode != VOIDmode)
3091 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
3092 if (icode != CODE_FOR_nothing)
3093 break;
3095 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3097 if (insn_mode == VOIDmode)
3098 return NULL_RTX;
3100 /* Make a place to write the result of the instruction. */
3101 result = target;
3102 if (! (result != 0
3103 && REG_P (result)
3104 && GET_MODE (result) == insn_mode
3105 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3106 result = gen_reg_rtx (insn_mode);
3108 /* Make a place to hold the source address. We will not expand
3109 the actual source until we are sure that the expansion will
3110 not fail -- there are trees that cannot be expanded twice. */
3111 src_reg = gen_reg_rtx (Pmode);
3113 /* Mark the beginning of the strlen sequence so we can emit the
3114 source operand later. */
3115 before_strlen = get_last_insn ();
3117 char_rtx = const0_rtx;
3118 char_mode = insn_data[(int) icode].operand[2].mode;
3119 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3120 char_mode))
3121 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3123 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3124 char_rtx, GEN_INT (align));
3125 if (! pat)
3126 return NULL_RTX;
3127 emit_insn (pat);
3129 /* Now that we are assured of success, expand the source. */
3130 start_sequence ();
3131 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3132 if (pat != src_reg)
3133 emit_move_insn (src_reg, pat);
3134 pat = get_insns ();
3135 end_sequence ();
3137 if (before_strlen)
3138 emit_insn_after (pat, before_strlen);
3139 else
3140 emit_insn_before (pat, get_insns ());
3142 /* Return the value in the proper mode for this function. */
3143 if (GET_MODE (result) == target_mode)
3144 target = result;
3145 else if (target != 0)
3146 convert_move (target, result, 0);
3147 else
3148 target = convert_to_mode (target_mode, result, 0);
3150 return target;
3154 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed; the
3155 caller should emit a normal call. Otherwise try to get the result
3156 in TARGET, if convenient (and in mode MODE if that's convenient). */
3158 static rtx
3159 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3161 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3163 tree type = TREE_TYPE (exp);
3164 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3165 CALL_EXPR_ARG (exp, 1), type);
3166 if (result)
3167 return expand_expr (result, target, mode, EXPAND_NORMAL);
3169 return NULL_RTX;
3172 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3173 caller should emit a normal call. Otherwise try to get the result
3174 in TARGET, if convenient (and in mode MODE if that's convenient). */
3176 static rtx
3177 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3179 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3181 tree type = TREE_TYPE (exp);
3182 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3183 CALL_EXPR_ARG (exp, 1), type);
3184 if (result)
3185 return expand_expr (result, target, mode, EXPAND_NORMAL);
3187 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3189 return NULL_RTX;
3192 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3193 caller should emit a normal call. Otherwise try to get the result
3194 in TARGET, if convenient (and in mode MODE if that's convenient). */
3196 static rtx
3197 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3199 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3201 tree type = TREE_TYPE (exp);
3202 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3203 CALL_EXPR_ARG (exp, 1), type);
3204 if (result)
3205 return expand_expr (result, target, mode, EXPAND_NORMAL);
3207 return NULL_RTX;
3210 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3211 caller should emit a normal call. Otherwise try to get the result
3212 in TARGET, if convenient (and in mode MODE if that's convenient). */
3214 static rtx
3215 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3217 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3219 tree type = TREE_TYPE (exp);
3220 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3221 CALL_EXPR_ARG (exp, 1), type);
3222 if (result)
3223 return expand_expr (result, target, mode, EXPAND_NORMAL);
3225 return NULL_RTX;
3228 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3229 bytes from constant string DATA + OFFSET and return it as a target
3230 constant. */
3232 static rtx
3233 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3234 enum machine_mode mode)
3236 const char *str = (const char *) data;
3238 gcc_assert (offset >= 0
3239 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3240 <= strlen (str) + 1));
3242 return c_readstr (str + offset, mode);
3245 /* Expand a call EXP to the memcpy builtin.
3246 Return NULL_RTX if we failed; the caller should emit a normal call,
3247 otherwise try to get the result in TARGET, if convenient (and in
3248 mode MODE if that's convenient). */
3250 static rtx
3251 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3253 tree fndecl = get_callee_fndecl (exp);
3255 if (!validate_arglist (exp,
3256 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3257 return NULL_RTX;
3258 else
3260 tree dest = CALL_EXPR_ARG (exp, 0);
3261 tree src = CALL_EXPR_ARG (exp, 1);
3262 tree len = CALL_EXPR_ARG (exp, 2);
3263 const char *src_str;
3264 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3265 unsigned int dest_align
3266 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3267 rtx dest_mem, src_mem, dest_addr, len_rtx;
3268 tree result = fold_builtin_memory_op (dest, src, len,
3269 TREE_TYPE (TREE_TYPE (fndecl)),
3270 false, /*endp=*/0);
3271 HOST_WIDE_INT expected_size = -1;
3272 unsigned int expected_align = 0;
3274 if (result)
3276 while (TREE_CODE (result) == COMPOUND_EXPR)
3278 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3279 EXPAND_NORMAL);
3280 result = TREE_OPERAND (result, 1);
3282 return expand_expr (result, target, mode, EXPAND_NORMAL);
3285 /* If DEST is not a pointer type, call the normal function. */
3286 if (dest_align == 0)
3287 return NULL_RTX;
3289 /* If SRC is not a pointer type, don't do this
3290 operation in-line. */
3291 if (src_align == 0)
3292 return NULL_RTX;
3294 stringop_block_profile (exp, &expected_align, &expected_size);
3295 if (expected_align < dest_align)
3296 expected_align = dest_align;
3297 dest_mem = get_memory_rtx (dest, len);
3298 set_mem_align (dest_mem, dest_align);
3299 len_rtx = expand_normal (len);
3300 src_str = c_getstr (src);
3302 /* If SRC is a string constant and block move would be done
3303 by pieces, we can avoid loading the string from memory
3304 and only store the computed constants. */
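/* For instance (exposition only): memcpy (buf, "hi", 3) with a suitably
   aligned destination can be emitted as direct stores of the bytes 'h',
   'i' and '\0', never referencing the source string in memory.  */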
3305 if (src_str
3306 && GET_CODE (len_rtx) == CONST_INT
3307 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3308 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3309 (void *) src_str, dest_align))
3311 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3312 builtin_memcpy_read_str,
3313 (void *) src_str, dest_align, 0);
3314 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3315 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3316 return dest_mem;
3319 src_mem = get_memory_rtx (src, len);
3320 set_mem_align (src_mem, src_align);
3322 /* Copy word part most expediently. */
3323 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3324 CALL_EXPR_TAILCALL (exp)
3325 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3326 expected_align, expected_size);
3328 if (dest_addr == 0)
3330 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3331 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3333 return dest_addr;
3337 /* Expand a call EXP to the mempcpy builtin.
3338 Return NULL_RTX if we failed; the caller should emit a normal call,
3339 otherwise try to get the result in TARGET, if convenient (and in
3340 mode MODE if that's convenient). If ENDP is 0 return the
3341 destination pointer, if ENDP is 1 return the end pointer ala
3342 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3343 stpcpy. */
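/* In other words (exposition only), for a copy of LEN bytes to DEST:
   ENDP == 0 yields DEST, ENDP == 1 yields DEST + LEN, and ENDP == 2
   yields DEST + LEN - 1, the address of the last byte written (the
   terminating nul in the stpcpy case).  */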
3345 static rtx
3346 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3348 if (!validate_arglist (exp,
3349 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3350 return NULL_RTX;
3351 else
3353 tree dest = CALL_EXPR_ARG (exp, 0);
3354 tree src = CALL_EXPR_ARG (exp, 1);
3355 tree len = CALL_EXPR_ARG (exp, 2);
3356 return expand_builtin_mempcpy_args (dest, src, len,
3357 TREE_TYPE (exp),
3358 target, mode, /*endp=*/ 1);
3362 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3363 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3364 so that this can also be called without constructing an actual CALL_EXPR.
3365 TYPE is the return type of the call. The other arguments and return value
3366 are the same as for expand_builtin_mempcpy. */
3368 static rtx
3369 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3370 rtx target, enum machine_mode mode, int endp)
3372 /* If return value is ignored, transform mempcpy into memcpy. */
3373 if (target == const0_rtx)
3375 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3377 if (!fn)
3378 return NULL_RTX;
3380 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3381 target, mode, EXPAND_NORMAL);
3383 else
3385 const char *src_str;
3386 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3387 unsigned int dest_align
3388 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3389 rtx dest_mem, src_mem, len_rtx;
3390 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3392 if (result)
3394 while (TREE_CODE (result) == COMPOUND_EXPR)
3396 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3397 EXPAND_NORMAL);
3398 result = TREE_OPERAND (result, 1);
3400 return expand_expr (result, target, mode, EXPAND_NORMAL);
3403 /* If either SRC or DEST is not a pointer type, don't do this
3404 operation in-line. */
3405 if (dest_align == 0 || src_align == 0)
3406 return NULL_RTX;
3408 /* If LEN is not constant, call the normal function. */
3409 if (! host_integerp (len, 1))
3410 return NULL_RTX;
3412 len_rtx = expand_normal (len);
3413 src_str = c_getstr (src);
3415 /* If SRC is a string constant and block move would be done
3416 by pieces, we can avoid loading the string from memory
3417 and only store the computed constants. */
3418 if (src_str
3419 && GET_CODE (len_rtx) == CONST_INT
3420 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3421 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3422 (void *) src_str, dest_align))
3424 dest_mem = get_memory_rtx (dest, len);
3425 set_mem_align (dest_mem, dest_align);
3426 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3427 builtin_memcpy_read_str,
3428 (void *) src_str, dest_align, endp);
3429 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3430 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3431 return dest_mem;
3434 if (GET_CODE (len_rtx) == CONST_INT
3435 && can_move_by_pieces (INTVAL (len_rtx),
3436 MIN (dest_align, src_align)))
3438 dest_mem = get_memory_rtx (dest, len);
3439 set_mem_align (dest_mem, dest_align);
3440 src_mem = get_memory_rtx (src, len);
3441 set_mem_align (src_mem, src_align);
3442 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3443 MIN (dest_align, src_align), endp);
3444 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3445 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3446 return dest_mem;
3449 return NULL_RTX;
3453 /* Expand expression EXP, which is a call to the memmove builtin. Return
3454 NULL_RTX if we failed; the caller should emit a normal call. */
3456 static rtx
3457 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3459 if (!validate_arglist (exp,
3460 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3461 return NULL_RTX;
3462 else
3464 tree dest = CALL_EXPR_ARG (exp, 0);
3465 tree src = CALL_EXPR_ARG (exp, 1);
3466 tree len = CALL_EXPR_ARG (exp, 2);
3467 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3468 target, mode, ignore);
3472 /* Helper function to do the actual work for expand_builtin_memmove. The
3473 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3474 so that this can also be called without constructing an actual CALL_EXPR.
3475 TYPE is the return type of the call. The other arguments and return value
3476 are the same as for expand_builtin_memmove. */
3478 static rtx
3479 expand_builtin_memmove_args (tree dest, tree src, tree len,
3480 tree type, rtx target, enum machine_mode mode,
3481 int ignore)
3483 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3485 if (result)
3487 STRIP_TYPE_NOPS (result);
3488 while (TREE_CODE (result) == COMPOUND_EXPR)
3490 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3491 EXPAND_NORMAL);
3492 result = TREE_OPERAND (result, 1);
3494 return expand_expr (result, target, mode, EXPAND_NORMAL);
3497 /* Otherwise, call the normal function. */
3498 return NULL_RTX;
3501 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3502 NULL_RTX if we failed; the caller should emit a normal call. */
3504 static rtx
3505 expand_builtin_bcopy (tree exp, int ignore)
3507 tree type = TREE_TYPE (exp);
3508 tree src, dest, size;
3510 if (!validate_arglist (exp,
3511 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3512 return NULL_RTX;
3514 src = CALL_EXPR_ARG (exp, 0);
3515 dest = CALL_EXPR_ARG (exp, 1);
3516 size = CALL_EXPR_ARG (exp, 2);
3518 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3519 This is done this way so that if it isn't expanded inline, we fall
3520 back to calling bcopy instead of memmove. */
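/* Illustrative sketch (not from the original source): only the argument
   order changes, e.g.

     bcopy (src, dst, n)    ==>   memmove (dst, src, n)

   where SRC, DST and N stand for arbitrary operands.  */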
3521 return expand_builtin_memmove_args (dest, src,
3522 fold_convert (sizetype, size),
3523 type, const0_rtx, VOIDmode,
3524 ignore);
3527 #ifndef HAVE_movstr
3528 # define HAVE_movstr 0
3529 # define CODE_FOR_movstr CODE_FOR_nothing
3530 #endif
3532 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3533 we failed, the caller should emit a normal call, otherwise try to
3534 get the result in TARGET, if convenient. If ENDP is 0 return the
3535 destination pointer, if ENDP is 1 return the end pointer ala
3536 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3537 stpcpy. */
3539 static rtx
3540 expand_movstr (tree dest, tree src, rtx target, int endp)
3542 rtx end;
3543 rtx dest_mem;
3544 rtx src_mem;
3545 rtx insn;
3546 const struct insn_data * data;
3548 if (!HAVE_movstr)
3549 return NULL_RTX;
3551 dest_mem = get_memory_rtx (dest, NULL);
3552 src_mem = get_memory_rtx (src, NULL);
3553 if (!endp)
3555 target = force_reg (Pmode, XEXP (dest_mem, 0));
3556 dest_mem = replace_equiv_address (dest_mem, target);
3557 end = gen_reg_rtx (Pmode);
3559 else
3561 if (target == 0 || target == const0_rtx)
3563 end = gen_reg_rtx (Pmode);
3564 if (target == 0)
3565 target = end;
3567 else
3568 end = target;
3571 data = insn_data + CODE_FOR_movstr;
3573 if (data->operand[0].mode != VOIDmode)
3574 end = gen_lowpart (data->operand[0].mode, end);
3576 insn = data->genfun (end, dest_mem, src_mem);
3578 gcc_assert (insn);
3580 emit_insn (insn);
3582 /* movstr is supposed to set end to the address of the NUL
3583 terminator. If the caller requested a mempcpy-like return value,
3584 adjust it. */
3585 if (endp == 1 && target != const0_rtx)
3587 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3588 emit_move_insn (target, force_operand (tem, NULL_RTX));
3591 return target;
3594 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3595 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3596 try to get the result in TARGET, if convenient (and in mode MODE if that's
3597 convenient). */
3599 static rtx
3600 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3602 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3604 tree dest = CALL_EXPR_ARG (exp, 0);
3605 tree src = CALL_EXPR_ARG (exp, 1);
3606 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3608 return NULL_RTX;
3611 /* Helper function to do the actual work for expand_builtin_strcpy. The
3612 arguments to the builtin_strcpy call DEST and SRC are broken out
3613 so that this can also be called without constructing an actual CALL_EXPR.
3614 The other arguments and return value are the same as for
3615 expand_builtin_strcpy. */
3617 static rtx
3618 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3619 rtx target, enum machine_mode mode)
3621 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3622 if (result)
3623 return expand_expr (result, target, mode, EXPAND_NORMAL);
3624 return expand_movstr (dest, src, target, /*endp=*/0);
3628 /* Expand a call EXP to the stpcpy builtin.
3629 Return NULL_RTX if we failed; the caller should emit a normal call,
3630 otherwise try to get the result in TARGET, if convenient (and in
3631 mode MODE if that's convenient). */
3633 static rtx
3634 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3636 tree dst, src;
3638 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3639 return NULL_RTX;
3641 dst = CALL_EXPR_ARG (exp, 0);
3642 src = CALL_EXPR_ARG (exp, 1);
3644 /* If return value is ignored, transform stpcpy into strcpy. */
3645 if (target == const0_rtx)
3647 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3648 if (!fn)
3649 return NULL_RTX;
3651 return expand_expr (build_call_expr (fn, 2, dst, src),
3652 target, mode, EXPAND_NORMAL);
3654 else
3656 tree len, lenp1;
3657 rtx ret;
3659 /* Ensure we get an actual string whose length can be evaluated at
3660 compile-time, not an expression containing a string. This is
3661 because the latter will potentially produce pessimized code
3662 when used to produce the return value. */
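/* Illustrative sketch (not from the original source): for a constant
   source such as stpcpy (dst, "abc"), LEN is 3 and LENP1 is 4, so the
   mempcpy-style expansion below copies 4 bytes (including the NUL) and,
   with ENDP == 2, yields dst + 3, a pointer to the copied terminator.  */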
3663 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3664 return expand_movstr (dst, src, target, /*endp=*/2);
3666 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3667 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3668 target, mode, /*endp=*/2);
3670 if (ret)
3671 return ret;
3673 if (TREE_CODE (len) == INTEGER_CST)
3675 rtx len_rtx = expand_normal (len);
3677 if (GET_CODE (len_rtx) == CONST_INT)
3679 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3680 dst, src, target, mode);
3682 if (ret)
3684 if (! target)
3686 if (mode != VOIDmode)
3687 target = gen_reg_rtx (mode);
3688 else
3689 target = gen_reg_rtx (GET_MODE (ret));
3691 if (GET_MODE (target) != GET_MODE (ret))
3692 ret = gen_lowpart (GET_MODE (target), ret);
3694 ret = plus_constant (ret, INTVAL (len_rtx));
3695 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3696 gcc_assert (ret);
3698 return target;
3703 return expand_movstr (dst, src, target, /*endp=*/2);
3707 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3708 bytes from constant string DATA + OFFSET and return it as target
3709 constant. */
3711 static rtx
3712 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3713 enum machine_mode mode)
3715 const char *str = (const char *) data;
3717 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3718 return const0_rtx;
3720 return c_readstr (str + offset, mode);
3723 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3724 NULL_RTX if we failed; the caller should emit a normal call. */
3726 static rtx
3727 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3729 tree fndecl = get_callee_fndecl (exp);
3731 if (validate_arglist (exp,
3732 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3734 tree dest = CALL_EXPR_ARG (exp, 0);
3735 tree src = CALL_EXPR_ARG (exp, 1);
3736 tree len = CALL_EXPR_ARG (exp, 2);
3737 tree slen = c_strlen (src, 1);
3738 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3740 if (result)
3742 while (TREE_CODE (result) == COMPOUND_EXPR)
3744 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3745 EXPAND_NORMAL);
3746 result = TREE_OPERAND (result, 1);
3748 return expand_expr (result, target, mode, EXPAND_NORMAL);
3751 /* We must be passed a constant len and src parameter. */
3752 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3753 return NULL_RTX;
3755 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3757 /* We're required to pad with trailing zeros if the requested
3758 len is greater than strlen(s2)+1. In that case try to
3759 use store_by_pieces; if that fails, punt. */
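/* Illustrative sketch (not from the original source) of the padding rule:
   strncpy (buf, "ab", 5) must leave BUF as 'a','b','\0','\0','\0', so
   when LEN exceeds strlen (SRC) + 1 the trailing bytes have to be zeroed
   explicitly; builtin_strncpy_read_str supplies those zeros by returning
   const0_rtx for offsets past the end of the source string.  */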
3760 if (tree_int_cst_lt (slen, len))
3762 unsigned int dest_align
3763 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3764 const char *p = c_getstr (src);
3765 rtx dest_mem;
3767 if (!p || dest_align == 0 || !host_integerp (len, 1)
3768 || !can_store_by_pieces (tree_low_cst (len, 1),
3769 builtin_strncpy_read_str,
3770 (void *) p, dest_align))
3771 return NULL_RTX;
3773 dest_mem = get_memory_rtx (dest, len);
3774 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3775 builtin_strncpy_read_str,
3776 (void *) p, dest_align, 0);
3777 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3778 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3779 return dest_mem;
3782 return NULL_RTX;
3785 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3786 bytes from constant string DATA + OFFSET and return it as target
3787 constant. */
3789 static rtx
3790 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3791 enum machine_mode mode)
3793 const char *c = (const char *) data;
3794 char *p = alloca (GET_MODE_SIZE (mode));
3796 memset (p, *c, GET_MODE_SIZE (mode));
3798 return c_readstr (p, mode);
3801 /* Callback routine for store_by_pieces. Return the RTL of a register
3802 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3803 char value given in the RTL register data. For example, if mode is
3804 4 bytes wide, return the RTL for 0x01010101*data. */
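/* Illustrative sketch (not from the original source): for a 4-byte MODE
   the coefficient read back by c_readstr from "\1\1\1\1" is 0x01010101,
   so multiplying the zero-extended byte value by it replicates that byte
   into every position, e.g. 0xab becomes 0xabababab.  */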
3806 static rtx
3807 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3808 enum machine_mode mode)
3810 rtx target, coeff;
3811 size_t size;
3812 char *p;
3814 size = GET_MODE_SIZE (mode);
3815 if (size == 1)
3816 return (rtx) data;
3818 p = alloca (size);
3819 memset (p, 1, size);
3820 coeff = c_readstr (p, mode);
3822 target = convert_to_mode (mode, (rtx) data, 1);
3823 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3824 return force_reg (mode, target);
3827 /* Expand expression EXP, which is a call to the memset builtin. Return
3828 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3829 try to get the result in TARGET, if convenient (and in mode MODE if that's
3830 convenient). */
3832 static rtx
3833 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3835 if (!validate_arglist (exp,
3836 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3837 return NULL_RTX;
3838 else
3840 tree dest = CALL_EXPR_ARG (exp, 0);
3841 tree val = CALL_EXPR_ARG (exp, 1);
3842 tree len = CALL_EXPR_ARG (exp, 2);
3843 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3847 /* Helper function to do the actual work for expand_builtin_memset. The
3848 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3849 so that this can also be called without constructing an actual CALL_EXPR.
3850 The other arguments and return value are the same as for
3851 expand_builtin_memset. */
3853 static rtx
3854 expand_builtin_memset_args (tree dest, tree val, tree len,
3855 rtx target, enum machine_mode mode, tree orig_exp)
3857 tree fndecl, fn;
3858 enum built_in_function fcode;
3859 char c;
3860 unsigned int dest_align;
3861 rtx dest_mem, dest_addr, len_rtx;
3862 HOST_WIDE_INT expected_size = -1;
3863 unsigned int expected_align = 0;
3865 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3867 /* If DEST is not a pointer type, don't do this operation in-line. */
3868 if (dest_align == 0)
3869 return NULL_RTX;
3871 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3872 if (expected_align < dest_align)
3873 expected_align = dest_align;
3875 /* If the LEN parameter is zero, return DEST. */
3876 if (integer_zerop (len))
3878 /* Evaluate and ignore VAL in case it has side-effects. */
3879 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3880 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3883 /* Stabilize the arguments in case we fail. */
3884 dest = builtin_save_expr (dest);
3885 val = builtin_save_expr (val);
3886 len = builtin_save_expr (len);
3888 len_rtx = expand_normal (len);
3889 dest_mem = get_memory_rtx (dest, len);
3891 if (TREE_CODE (val) != INTEGER_CST)
3893 rtx val_rtx;
3895 val_rtx = expand_normal (val);
3896 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3897 val_rtx, 0);
3899 /* Assume that we can memset by pieces if we can store
3900 the coefficients by pieces (in the required modes).
3901 We can't pass builtin_memset_gen_str as that emits RTL. */
3902 c = 1;
3903 if (host_integerp (len, 1)
3904 && !(optimize_size && tree_low_cst (len, 1) > 1)
3905 && can_store_by_pieces (tree_low_cst (len, 1),
3906 builtin_memset_read_str, &c, dest_align))
3908 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3909 val_rtx);
3910 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3911 builtin_memset_gen_str, val_rtx, dest_align, 0);
3913 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3914 dest_align, expected_align,
3915 expected_size))
3916 goto do_libcall;
3918 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3919 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3920 return dest_mem;
3923 if (target_char_cast (val, &c))
3924 goto do_libcall;
3926 if (c)
3928 if (host_integerp (len, 1)
3929 && !(optimize_size && tree_low_cst (len, 1) > 1)
3930 && can_store_by_pieces (tree_low_cst (len, 1),
3931 builtin_memset_read_str, &c, dest_align))
3932 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3933 builtin_memset_read_str, &c, dest_align, 0);
3934 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3935 dest_align, expected_align,
3936 expected_size))
3937 goto do_libcall;
3939 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3940 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3941 return dest_mem;
3944 set_mem_align (dest_mem, dest_align);
3945 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3946 CALL_EXPR_TAILCALL (orig_exp)
3947 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3948 expected_align, expected_size);
3950 if (dest_addr == 0)
3952 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3953 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3956 return dest_addr;
3958 do_libcall:
3959 fndecl = get_callee_fndecl (orig_exp);
3960 fcode = DECL_FUNCTION_CODE (fndecl);
3961 if (fcode == BUILT_IN_MEMSET)
3962 fn = build_call_expr (fndecl, 3, dest, val, len);
3963 else if (fcode == BUILT_IN_BZERO)
3964 fn = build_call_expr (fndecl, 2, dest, len);
3965 else
3966 gcc_unreachable ();
3967 if (TREE_CODE (fn) == CALL_EXPR)
3968 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3969 return expand_call (fn, target, target == const0_rtx);
3972 /* Expand expression EXP, which is a call to the bzero builtin. Return
3973 NULL_RTX if we failed; the caller should emit a normal call. */
3975 static rtx
3976 expand_builtin_bzero (tree exp)
3978 tree dest, size;
3980 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3981 return NULL_RTX;
3983 dest = CALL_EXPR_ARG (exp, 0);
3984 size = CALL_EXPR_ARG (exp, 1);
3986 /* New argument list transforming bzero(ptr x, int y) to
3987 memset(ptr x, int 0, size_t y). This is done this way
3988 so that if it isn't expanded inline, we fall back to
3989 calling bzero instead of memset. */
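/* Illustrative sketch (not from the original source):

     bzero (p, n)    ==>   memset (p, 0, n)

   with the size argument converted to sizetype, as done below.  */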
3991 return expand_builtin_memset_args (dest, integer_zero_node,
3992 fold_convert (sizetype, size),
3993 const0_rtx, VOIDmode, exp);
3996 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed; the
3997 caller should emit a normal call, otherwise try to get the result
3998 in TARGET, if convenient (and in mode MODE if that's convenient). */
4000 static rtx
4001 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4003 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4004 INTEGER_TYPE, VOID_TYPE))
4006 tree type = TREE_TYPE (exp);
4007 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4008 CALL_EXPR_ARG (exp, 1),
4009 CALL_EXPR_ARG (exp, 2), type);
4010 if (result)
4011 return expand_expr (result, target, mode, EXPAND_NORMAL);
4013 return NULL_RTX;
4016 /* Expand expression EXP, which is a call to the memcmp built-in function.
4017 Return NULL_RTX if we failed; the
4018 caller should emit a normal call, otherwise try to get the result in
4019 TARGET, if convenient (and in mode MODE, if that's convenient). */
4021 static rtx
4022 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4024 if (!validate_arglist (exp,
4025 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4026 return NULL_RTX;
4027 else
4029 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4030 CALL_EXPR_ARG (exp, 1),
4031 CALL_EXPR_ARG (exp, 2));
4032 if (result)
4033 return expand_expr (result, target, mode, EXPAND_NORMAL);
4036 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4038 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4039 rtx result;
4040 rtx insn;
4041 tree arg1 = CALL_EXPR_ARG (exp, 0);
4042 tree arg2 = CALL_EXPR_ARG (exp, 1);
4043 tree len = CALL_EXPR_ARG (exp, 2);
4045 int arg1_align
4046 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4047 int arg2_align
4048 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4049 enum machine_mode insn_mode;
4051 #ifdef HAVE_cmpmemsi
4052 if (HAVE_cmpmemsi)
4053 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4054 else
4055 #endif
4056 #ifdef HAVE_cmpstrnsi
4057 if (HAVE_cmpstrnsi)
4058 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4059 else
4060 #endif
4061 return NULL_RTX;
4063 /* If we don't have POINTER_TYPE, call the function. */
4064 if (arg1_align == 0 || arg2_align == 0)
4065 return NULL_RTX;
4067 /* Make a place to write the result of the instruction. */
4068 result = target;
4069 if (! (result != 0
4070 && REG_P (result) && GET_MODE (result) == insn_mode
4071 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4072 result = gen_reg_rtx (insn_mode);
4074 arg1_rtx = get_memory_rtx (arg1, len);
4075 arg2_rtx = get_memory_rtx (arg2, len);
4076 arg3_rtx = expand_normal (len);
4078 /* Set MEM_SIZE as appropriate. */
4079 if (GET_CODE (arg3_rtx) == CONST_INT)
4081 set_mem_size (arg1_rtx, arg3_rtx);
4082 set_mem_size (arg2_rtx, arg3_rtx);
4085 #ifdef HAVE_cmpmemsi
4086 if (HAVE_cmpmemsi)
4087 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4088 GEN_INT (MIN (arg1_align, arg2_align)));
4089 else
4090 #endif
4091 #ifdef HAVE_cmpstrnsi
4092 if (HAVE_cmpstrnsi)
4093 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4094 GEN_INT (MIN (arg1_align, arg2_align)));
4095 else
4096 #endif
4097 gcc_unreachable ();
4099 if (insn)
4100 emit_insn (insn);
4101 else
4102 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
4103 TYPE_MODE (integer_type_node), 3,
4104 XEXP (arg1_rtx, 0), Pmode,
4105 XEXP (arg2_rtx, 0), Pmode,
4106 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4107 TYPE_UNSIGNED (sizetype)),
4108 TYPE_MODE (sizetype));
4110 /* Return the value in the proper mode for this function. */
4111 mode = TYPE_MODE (TREE_TYPE (exp));
4112 if (GET_MODE (result) == mode)
4113 return result;
4114 else if (target != 0)
4116 convert_move (target, result, 0);
4117 return target;
4119 else
4120 return convert_to_mode (mode, result, 0);
4122 #endif
4124 return NULL_RTX;
4127 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4128 if we failed; the caller should emit a normal call, otherwise try to get
4129 the result in TARGET, if convenient. */
4131 static rtx
4132 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4134 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4135 return NULL_RTX;
4136 else
4138 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4139 CALL_EXPR_ARG (exp, 1));
4140 if (result)
4141 return expand_expr (result, target, mode, EXPAND_NORMAL);
4144 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4145 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4146 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4148 rtx arg1_rtx, arg2_rtx;
4149 rtx result, insn = NULL_RTX;
4150 tree fndecl, fn;
4151 tree arg1 = CALL_EXPR_ARG (exp, 0);
4152 tree arg2 = CALL_EXPR_ARG (exp, 1);
4154 int arg1_align
4155 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4156 int arg2_align
4157 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4159 /* If we don't have POINTER_TYPE, call the function. */
4160 if (arg1_align == 0 || arg2_align == 0)
4161 return NULL_RTX;
4163 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4164 arg1 = builtin_save_expr (arg1);
4165 arg2 = builtin_save_expr (arg2);
4167 arg1_rtx = get_memory_rtx (arg1, NULL);
4168 arg2_rtx = get_memory_rtx (arg2, NULL);
4170 #ifdef HAVE_cmpstrsi
4171 /* Try to call cmpstrsi. */
4172 if (HAVE_cmpstrsi)
4174 enum machine_mode insn_mode
4175 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4177 /* Make a place to write the result of the instruction. */
4178 result = target;
4179 if (! (result != 0
4180 && REG_P (result) && GET_MODE (result) == insn_mode
4181 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4182 result = gen_reg_rtx (insn_mode);
4184 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4185 GEN_INT (MIN (arg1_align, arg2_align)));
4187 #endif
4188 #ifdef HAVE_cmpstrnsi
4189 /* Try to determine at least one length and call cmpstrnsi. */
4190 if (!insn && HAVE_cmpstrnsi)
4192 tree len;
4193 rtx arg3_rtx;
4195 enum machine_mode insn_mode
4196 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4197 tree len1 = c_strlen (arg1, 1);
4198 tree len2 = c_strlen (arg2, 1);
4200 if (len1)
4201 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4202 if (len2)
4203 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4205 /* If we don't have a constant length for the first, use the length
4206 of the second, if we know it. We don't require a constant for
4207 this case; some cost analysis could be done if both are available
4208 but neither is constant. For now, assume they're equally cheap,
4209 unless one has side effects. If both strings have constant lengths,
4210 use the smaller. */
4212 if (!len1)
4213 len = len2;
4214 else if (!len2)
4215 len = len1;
4216 else if (TREE_SIDE_EFFECTS (len1))
4217 len = len2;
4218 else if (TREE_SIDE_EFFECTS (len2))
4219 len = len1;
4220 else if (TREE_CODE (len1) != INTEGER_CST)
4221 len = len2;
4222 else if (TREE_CODE (len2) != INTEGER_CST)
4223 len = len1;
4224 else if (tree_int_cst_lt (len1, len2))
4225 len = len1;
4226 else
4227 len = len2;
4229 /* If both arguments have side effects, we cannot optimize. */
4230 if (!len || TREE_SIDE_EFFECTS (len))
4231 goto do_libcall;
4233 arg3_rtx = expand_normal (len);
4235 /* Make a place to write the result of the instruction. */
4236 result = target;
4237 if (! (result != 0
4238 && REG_P (result) && GET_MODE (result) == insn_mode
4239 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4240 result = gen_reg_rtx (insn_mode);
4242 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4243 GEN_INT (MIN (arg1_align, arg2_align)));
4245 #endif
4247 if (insn)
4249 emit_insn (insn);
4251 /* Return the value in the proper mode for this function. */
4252 mode = TYPE_MODE (TREE_TYPE (exp));
4253 if (GET_MODE (result) == mode)
4254 return result;
4255 if (target == 0)
4256 return convert_to_mode (mode, result, 0);
4257 convert_move (target, result, 0);
4258 return target;
4261 /* Expand the library call ourselves using a stabilized argument
4262 list to avoid re-evaluating the function's arguments twice. */
4263 #ifdef HAVE_cmpstrnsi
4264 do_libcall:
4265 #endif
4266 fndecl = get_callee_fndecl (exp);
4267 fn = build_call_expr (fndecl, 2, arg1, arg2);
4268 if (TREE_CODE (fn) == CALL_EXPR)
4269 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4270 return expand_call (fn, target, target == const0_rtx);
4272 #endif
4273 return NULL_RTX;
4276 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4277 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4278 the result in TARGET, if convenient. */
4280 static rtx
4281 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4283 if (!validate_arglist (exp,
4284 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4285 return NULL_RTX;
4286 else
4288 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4289 CALL_EXPR_ARG (exp, 1),
4290 CALL_EXPR_ARG (exp, 2));
4291 if (result)
4292 return expand_expr (result, target, mode, EXPAND_NORMAL);
4295 /* If c_strlen can determine an expression for one of the string
4296 lengths, and it doesn't have side effects, then emit cmpstrnsi
4297 using length MIN(strlen(string)+1, arg3). */
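/* Illustrative sketch (not from the original source): for
   strncmp (s, "abc", 10) we know strlen ("abc") + 1 == 4, so the length
   passed to cmpstrnsi is MIN (4, 10) == 4; the bound of 10 is irrelevant
   because the constant string terminates within those 4 bytes.  */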
4298 #ifdef HAVE_cmpstrnsi
4299 if (HAVE_cmpstrnsi)
4301 tree len, len1, len2;
4302 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4303 rtx result, insn;
4304 tree fndecl, fn;
4305 tree arg1 = CALL_EXPR_ARG (exp, 0);
4306 tree arg2 = CALL_EXPR_ARG (exp, 1);
4307 tree arg3 = CALL_EXPR_ARG (exp, 2);
4309 int arg1_align
4310 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4311 int arg2_align
4312 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4313 enum machine_mode insn_mode
4314 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4316 len1 = c_strlen (arg1, 1);
4317 len2 = c_strlen (arg2, 1);
4319 if (len1)
4320 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4321 if (len2)
4322 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4324 /* If we don't have a constant length for the first, use the length
4325 of the second, if we know it. We don't require a constant for
4326 this case; some cost analysis could be done if both are available
4327 but neither is constant. For now, assume they're equally cheap,
4328 unless one has side effects. If both strings have constant lengths,
4329 use the smaller. */
4331 if (!len1)
4332 len = len2;
4333 else if (!len2)
4334 len = len1;
4335 else if (TREE_SIDE_EFFECTS (len1))
4336 len = len2;
4337 else if (TREE_SIDE_EFFECTS (len2))
4338 len = len1;
4339 else if (TREE_CODE (len1) != INTEGER_CST)
4340 len = len2;
4341 else if (TREE_CODE (len2) != INTEGER_CST)
4342 len = len1;
4343 else if (tree_int_cst_lt (len1, len2))
4344 len = len1;
4345 else
4346 len = len2;
4348 /* If both arguments have side effects, we cannot optimize. */
4349 if (!len || TREE_SIDE_EFFECTS (len))
4350 return NULL_RTX;
4352 /* The actual new length parameter is MIN(len,arg3). */
4353 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4354 fold_convert (TREE_TYPE (len), arg3));
4356 /* If we don't have POINTER_TYPE, call the function. */
4357 if (arg1_align == 0 || arg2_align == 0)
4358 return NULL_RTX;
4360 /* Make a place to write the result of the instruction. */
4361 result = target;
4362 if (! (result != 0
4363 && REG_P (result) && GET_MODE (result) == insn_mode
4364 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4365 result = gen_reg_rtx (insn_mode);
4367 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4368 arg1 = builtin_save_expr (arg1);
4369 arg2 = builtin_save_expr (arg2);
4370 len = builtin_save_expr (len);
4372 arg1_rtx = get_memory_rtx (arg1, len);
4373 arg2_rtx = get_memory_rtx (arg2, len);
4374 arg3_rtx = expand_normal (len);
4375 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4376 GEN_INT (MIN (arg1_align, arg2_align)));
4377 if (insn)
4379 emit_insn (insn);
4381 /* Return the value in the proper mode for this function. */
4382 mode = TYPE_MODE (TREE_TYPE (exp));
4383 if (GET_MODE (result) == mode)
4384 return result;
4385 if (target == 0)
4386 return convert_to_mode (mode, result, 0);
4387 convert_move (target, result, 0);
4388 return target;
4391 /* Expand the library call ourselves using a stabilized argument
4392 list to avoid re-evaluating the function's arguments twice. */
4393 fndecl = get_callee_fndecl (exp);
4394 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4395 if (TREE_CODE (fn) == CALL_EXPR)
4396 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4397 return expand_call (fn, target, target == const0_rtx);
4399 #endif
4400 return NULL_RTX;
4403 /* Expand expression EXP, which is a call to the strcat builtin.
4404 Return NULL_RTX if we failed; the caller should emit a normal call,
4405 otherwise try to get the result in TARGET, if convenient. */
4407 static rtx
4408 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4410 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4411 return NULL_RTX;
4412 else
4414 tree dst = CALL_EXPR_ARG (exp, 0);
4415 tree src = CALL_EXPR_ARG (exp, 1);
4416 const char *p = c_getstr (src);
4418 /* If the string length is zero, return the dst parameter. */
4419 if (p && *p == '\0')
4420 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4422 if (!optimize_size)
4424 /* See if we can store by pieces into (dst + strlen(dst)). */
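/* Illustrative sketch (not from the original source): the transformation
   built below is essentially

     strcat (dst, src)    ==>   strcpy (dst + strlen (dst), src), dst

   i.e. copy SRC just past the existing terminator and return DST.  */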
4425 tree newsrc, newdst,
4426 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4427 rtx insns;
4429 /* Stabilize the argument list. */
4430 newsrc = builtin_save_expr (src);
4431 dst = builtin_save_expr (dst);
4433 start_sequence ();
4435 /* Create strlen (dst). */
4436 newdst = build_call_expr (strlen_fn, 1, dst);
4437 /* Create (dst p+ strlen (dst)). */
4439 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4440 newdst = builtin_save_expr (newdst);
4442 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4444 end_sequence (); /* Stop sequence. */
4445 return NULL_RTX;
4448 /* Output the entire sequence. */
4449 insns = get_insns ();
4450 end_sequence ();
4451 emit_insn (insns);
4453 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4456 return NULL_RTX;
4460 /* Expand expression EXP, which is a call to the strncat builtin.
4461 Return NULL_RTX if we failed; the caller should emit a normal call,
4462 otherwise try to get the result in TARGET, if convenient. */
4464 static rtx
4465 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4467 if (validate_arglist (exp,
4468 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4470 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4471 CALL_EXPR_ARG (exp, 1),
4472 CALL_EXPR_ARG (exp, 2));
4473 if (result)
4474 return expand_expr (result, target, mode, EXPAND_NORMAL);
4476 return NULL_RTX;
4479 /* Expand expression EXP, which is a call to the strspn builtin.
4480 Return NULL_RTX if we failed; the caller should emit a normal call,
4481 otherwise try to get the result in TARGET, if convenient. */
4483 static rtx
4484 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4486 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4488 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4489 CALL_EXPR_ARG (exp, 1));
4490 if (result)
4491 return expand_expr (result, target, mode, EXPAND_NORMAL);
4493 return NULL_RTX;
4496 /* Expand expression EXP, which is a call to the strcspn builtin.
4497 Return NULL_RTX if we failed; the caller should emit a normal call,
4498 otherwise try to get the result in TARGET, if convenient. */
4500 static rtx
4501 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4503 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4505 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4506 CALL_EXPR_ARG (exp, 1));
4507 if (result)
4508 return expand_expr (result, target, mode, EXPAND_NORMAL);
4510 return NULL_RTX;
4513 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4514 if that's convenient. */
4516 rtx
4517 expand_builtin_saveregs (void)
4519 rtx val, seq;
4521 /* Don't do __builtin_saveregs more than once in a function.
4522 Save the result of the first call and reuse it. */
4523 if (saveregs_value != 0)
4524 return saveregs_value;
4526 /* When this function is called, it means that registers must be
4527 saved on entry to this function. So we migrate the call to the
4528 first insn of this function. */
4530 start_sequence ();
4532 /* Do whatever the machine needs done in this case. */
4533 val = targetm.calls.expand_builtin_saveregs ();
4535 seq = get_insns ();
4536 end_sequence ();
4538 saveregs_value = val;
4540 /* Put the insns after the NOTE that starts the function. If this
4541 is inside a start_sequence, make the outer-level insn chain current, so
4542 the code is placed at the start of the function. */
4543 push_topmost_sequence ();
4544 emit_insn_after (seq, entry_of_function ());
4545 pop_topmost_sequence ();
4547 return val;
4550 /* __builtin_args_info (N) returns word N of the arg space info
4551 for the current function. The number and meanings of words
4552 is controlled by the definition of CUMULATIVE_ARGS. */
4554 static rtx
4555 expand_builtin_args_info (tree exp)
4557 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4558 int *word_ptr = (int *) &current_function_args_info;
4560 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4562 if (call_expr_nargs (exp) != 0)
4564 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4565 error ("argument of %<__builtin_args_info%> must be constant");
4566 else
4568 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4570 if (wordnum < 0 || wordnum >= nwords)
4571 error ("argument of %<__builtin_args_info%> out of range");
4572 else
4573 return GEN_INT (word_ptr[wordnum]);
4576 else
4577 error ("missing argument in %<__builtin_args_info%>");
4579 return const0_rtx;
4582 /* Expand a call to __builtin_next_arg. */
4584 static rtx
4585 expand_builtin_next_arg (void)
4587 /* Checking arguments is already done in fold_builtin_next_arg
4588 that must be called before this function. */
4589 return expand_binop (ptr_mode, add_optab,
4590 current_function_internal_arg_pointer,
4591 current_function_arg_offset_rtx,
4592 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4595 /* Make it easier for the backends by protecting the valist argument
4596 from multiple evaluations. */
4598 static tree
4599 stabilize_va_list (tree valist, int needs_lvalue)
4601 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4603 if (TREE_SIDE_EFFECTS (valist))
4604 valist = save_expr (valist);
4606 /* For this case, the backends will be expecting a pointer to
4607 TREE_TYPE (va_list_type_node), but it's possible we've
4608 actually been given an array (an actual va_list_type_node).
4609 So fix it. */
4610 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4612 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4613 valist = build_fold_addr_expr_with_type (valist, p1);
4616 else
4618 tree pt;
4620 if (! needs_lvalue)
4622 if (! TREE_SIDE_EFFECTS (valist))
4623 return valist;
4625 pt = build_pointer_type (va_list_type_node);
4626 valist = fold_build1 (ADDR_EXPR, pt, valist);
4627 TREE_SIDE_EFFECTS (valist) = 1;
4630 if (TREE_SIDE_EFFECTS (valist))
4631 valist = save_expr (valist);
4632 valist = build_fold_indirect_ref (valist);
4635 return valist;
4638 /* The "standard" definition of va_list is void*. */
4640 tree
4641 std_build_builtin_va_list (void)
4643 return ptr_type_node;
4646 /* The "standard" implementation of va_start: just assign `nextarg' to
4647 the variable. */
4649 void
4650 std_expand_builtin_va_start (tree valist, rtx nextarg)
4652 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4653 convert_move (va_r, nextarg, 0);
4656 /* Expand EXP, a call to __builtin_va_start. */
4658 static rtx
4659 expand_builtin_va_start (tree exp)
4661 rtx nextarg;
4662 tree valist;
4664 if (call_expr_nargs (exp) < 2)
4666 error ("too few arguments to function %<va_start%>");
4667 return const0_rtx;
4670 if (fold_builtin_next_arg (exp, true))
4671 return const0_rtx;
4673 nextarg = expand_builtin_next_arg ();
4674 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4676 #ifdef EXPAND_BUILTIN_VA_START
4677 EXPAND_BUILTIN_VA_START (valist, nextarg);
4678 #else
4679 std_expand_builtin_va_start (valist, nextarg);
4680 #endif
4682 return const0_rtx;
4685 /* The "standard" implementation of va_arg: read the value from the
4686 current (padded) address and increment by the (padded) size. */
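/* Illustrative sketch (not from the original source) of the generated
   code in the simple case (no extra alignment, arguments padded upward),
   written at the C level:

     addr = ap;
     ap = ap + round_up (sizeof (TYPE), PARM_BOUNDARY / BITS_PER_UNIT);
     result = *(TYPE *) addr;

   where TYPE is the requested type and AP the va_list pointer; the
   gimplification below builds the equivalent trees, with extra handling
   for dynamic alignment, downward padding and pass-by-reference types.  */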
4688 tree
4689 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4691 tree addr, t, type_size, rounded_size, valist_tmp;
4692 unsigned HOST_WIDE_INT align, boundary;
4693 bool indirect;
4695 #ifdef ARGS_GROW_DOWNWARD
4696 /* All of the alignment and movement below is for args-grow-up machines.
4697 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4698 implement their own specialized gimplify_va_arg_expr routines. */
4699 gcc_unreachable ();
4700 #endif
4702 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4703 if (indirect)
4704 type = build_pointer_type (type);
4706 align = PARM_BOUNDARY / BITS_PER_UNIT;
4707 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4709 /* Hoist the valist value into a temporary for the moment. */
4710 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4712 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4713 requires greater alignment, we must perform dynamic alignment. */
4714 if (boundary > align
4715 && !integer_zerop (TYPE_SIZE (type)))
4717 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4718 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4719 valist_tmp, size_int (boundary - 1)));
4720 gimplify_and_add (t, pre_p);
4722 t = fold_convert (sizetype, valist_tmp);
4723 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4724 fold_convert (TREE_TYPE (valist),
4725 fold_build2 (BIT_AND_EXPR, sizetype, t,
4726 size_int (-boundary))));
4727 gimplify_and_add (t, pre_p);
4729 else
4730 boundary = align;
4732 /* If the actual alignment is less than the alignment of the type,
4733 adjust the type accordingly so that we don't assume strict alignment
4734 when dereferencing the pointer. */
4735 boundary *= BITS_PER_UNIT;
4736 if (boundary < TYPE_ALIGN (type))
4738 type = build_variant_type_copy (type);
4739 TYPE_ALIGN (type) = boundary;
4742 /* Compute the rounded size of the type. */
4743 type_size = size_in_bytes (type);
4744 rounded_size = round_up (type_size, align);
4746 /* Reduce rounded_size so it's sharable with the postqueue. */
4747 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4749 /* Get AP. */
4750 addr = valist_tmp;
4751 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4753 /* Small args are padded downward. */
4754 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4755 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4756 size_binop (MINUS_EXPR, rounded_size, type_size));
4757 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4760 /* Compute new value for AP. */
4761 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4762 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4763 gimplify_and_add (t, pre_p);
4765 addr = fold_convert (build_pointer_type (type), addr);
4767 if (indirect)
4768 addr = build_va_arg_indirect_ref (addr);
4770 return build_va_arg_indirect_ref (addr);
4773 /* Build an indirect-ref expression over the given TREE, which represents a
4774 piece of a va_arg() expansion. */
4775 tree
4776 build_va_arg_indirect_ref (tree addr)
4778 addr = build_fold_indirect_ref (addr);
4780 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4781 mf_mark (addr);
4783 return addr;
4786 /* Return a dummy expression of type TYPE in order to keep going after an
4787 error. */
4789 static tree
4790 dummy_object (tree type)
4792 tree t = build_int_cst (build_pointer_type (type), 0);
4793 return build1 (INDIRECT_REF, type, t);
4796 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4797 builtin function, but a very special sort of operator. */
4799 enum gimplify_status
4800 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4802 tree promoted_type, want_va_type, have_va_type;
4803 tree valist = TREE_OPERAND (*expr_p, 0);
4804 tree type = TREE_TYPE (*expr_p);
4805 tree t;
4807 /* Verify that valist is of the proper type. */
4808 want_va_type = va_list_type_node;
4809 have_va_type = TREE_TYPE (valist);
4811 if (have_va_type == error_mark_node)
4812 return GS_ERROR;
4814 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4816 /* If va_list is an array type, the argument may have decayed
4817 to a pointer type, e.g. by being passed to another function.
4818 In that case, unwrap both types so that we can compare the
4819 underlying records. */
4820 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4821 || POINTER_TYPE_P (have_va_type))
4823 want_va_type = TREE_TYPE (want_va_type);
4824 have_va_type = TREE_TYPE (have_va_type);
4828 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4830 error ("first argument to %<va_arg%> not of type %<va_list%>");
4831 return GS_ERROR;
4834 /* Generate a diagnostic for requesting data of a type that cannot
4835 be passed through `...' due to type promotion at the call site. */
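/* For example (illustrative, not from the original source),
   va_arg (ap, char) is diagnosed here because a char argument is promoted
   to int at the call site, so the caller must write va_arg (ap, int) and
   convert the result instead.  */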
4836 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4837 != type)
4839 static bool gave_help;
4841 /* Unfortunately, this is merely undefined, rather than a constraint
4842 violation, so we cannot make this an error. If this call is never
4843 executed, the program is still strictly conforming. */
4844 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4845 type, promoted_type);
4846 if (! gave_help)
4848 gave_help = true;
4849 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4850 promoted_type, type);
4853 /* We can, however, treat "undefined" any way we please.
4854 Call abort to encourage the user to fix the program. */
4855 inform ("if this code is reached, the program will abort");
4856 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4857 append_to_statement_list (t, pre_p);
4859 /* This is dead code, but go ahead and finish so that the
4860 mode of the result comes out right. */
4861 *expr_p = dummy_object (type);
4862 return GS_ALL_DONE;
4864 else
4866 /* Make it easier for the backends by protecting the valist argument
4867 from multiple evaluations. */
4868 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4870 /* For this case, the backends will be expecting a pointer to
4871 TREE_TYPE (va_list_type_node), but it's possible we've
4872 actually been given an array (an actual va_list_type_node).
4873 So fix it. */
4874 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4876 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4877 valist = build_fold_addr_expr_with_type (valist, p1);
4879 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4881 else
4882 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4884 if (!targetm.gimplify_va_arg_expr)
4885 /* FIXME:Once most targets are converted we should merely
4886 assert this is non-null. */
4887 return GS_ALL_DONE;
4889 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4890 return GS_OK;
4894 /* Expand EXP, a call to __builtin_va_end. */
4896 static rtx
4897 expand_builtin_va_end (tree exp)
4899 tree valist = CALL_EXPR_ARG (exp, 0);
4901 /* Evaluate for side effects, if needed. I hate macros that don't
4902 do that. */
4903 if (TREE_SIDE_EFFECTS (valist))
4904 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4906 return const0_rtx;
4909 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4910 builtin rather than just as an assignment in stdarg.h because of the
4911 nastiness of array-type va_list types. */
4913 static rtx
4914 expand_builtin_va_copy (tree exp)
4916 tree dst, src, t;
4918 dst = CALL_EXPR_ARG (exp, 0);
4919 src = CALL_EXPR_ARG (exp, 1);
4921 dst = stabilize_va_list (dst, 1);
4922 src = stabilize_va_list (src, 0);
4924 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4926 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4927 TREE_SIDE_EFFECTS (t) = 1;
4928 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4930 else
4932 rtx dstb, srcb, size;
4934 /* Evaluate to pointers. */
4935 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4936 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4937 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4938 VOIDmode, EXPAND_NORMAL);
4940 dstb = convert_memory_address (Pmode, dstb);
4941 srcb = convert_memory_address (Pmode, srcb);
4943 /* "Dereference" to BLKmode memories. */
4944 dstb = gen_rtx_MEM (BLKmode, dstb);
4945 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4946 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4947 srcb = gen_rtx_MEM (BLKmode, srcb);
4948 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4949 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4951 /* Copy. */
4952 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4955 return const0_rtx;
4958 /* Expand a call to one of the builtin functions __builtin_frame_address or
4959 __builtin_return_address. */
4961 static rtx
4962 expand_builtin_frame_address (tree fndecl, tree exp)
4964 /* The argument must be a nonnegative integer constant.
4965 It counts the number of frames to scan up the stack.
4966 The value is the return address saved in that frame. */
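/* For example (illustrative, not from the original source),
   __builtin_return_address (0) yields the address the current function
   will return to, and __builtin_frame_address (1) the frame address of
   its caller, on targets that support walking that far up the stack.  */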
4967 if (call_expr_nargs (exp) == 0)
4968 /* Warning about missing arg was already issued. */
4969 return const0_rtx;
4970 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4972 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4973 error ("invalid argument to %<__builtin_frame_address%>");
4974 else
4975 error ("invalid argument to %<__builtin_return_address%>");
4976 return const0_rtx;
4978 else
4980 rtx tem
4981 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4982 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4984 /* Some ports cannot access arbitrary stack frames. */
4985 if (tem == NULL)
4987 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4988 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4989 else
4990 warning (0, "unsupported argument to %<__builtin_return_address%>");
4991 return const0_rtx;
4994 /* For __builtin_frame_address, return what we've got. */
4995 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4996 return tem;
4998 if (!REG_P (tem)
4999 && ! CONSTANT_P (tem))
5000 tem = copy_to_mode_reg (Pmode, tem);
5001 return tem;
5005 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5006 we failed; the caller should emit a normal call, otherwise try to get
5007 the result in TARGET, if convenient. */
5009 static rtx
5010 expand_builtin_alloca (tree exp, rtx target)
5012 rtx op0;
5013 rtx result;
5015 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5016 should always expand to function calls. These can be intercepted
5017 in libmudflap. */
5018 if (flag_mudflap)
5019 return NULL_RTX;
5021 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5022 return NULL_RTX;
5024 /* Compute the argument. */
5025 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5027 /* Allocate the desired space. */
5028 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5029 result = convert_memory_address (ptr_mode, result);
5031 return result;
5034 /* Expand a call to a bswap builtin with argument ARG0. MODE
5035 is the mode to expand with. */
5037 static rtx
5038 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5040 enum machine_mode mode;
5041 tree arg;
5042 rtx op0;
5044 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5045 return NULL_RTX;
5047 arg = CALL_EXPR_ARG (exp, 0);
5048 mode = TYPE_MODE (TREE_TYPE (arg));
5049 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5051 target = expand_unop (mode, bswap_optab, op0, target, 1);
5053 gcc_assert (target);
5055 return convert_to_mode (mode, target, 0);
5058 /* Expand a call to a unary builtin in EXP.
5059 Return NULL_RTX if a normal call should be emitted rather than expanding the
5060 function in-line. If convenient, the result should be placed in TARGET.
5061 SUBTARGET may be used as the target for computing one of EXP's operands. */
5063 static rtx
5064 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5065 rtx subtarget, optab op_optab)
5067 rtx op0;
5069 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5070 return NULL_RTX;
5072 /* Compute the argument. */
5073 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5074 VOIDmode, EXPAND_NORMAL);
5075 /* Compute op, into TARGET if possible.
5076 Set TARGET to wherever the result comes back. */
5077 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5078 op_optab, op0, target, 1);
5079 gcc_assert (target);
5081 return convert_to_mode (target_mode, target, 0);
5084 /* If the string passed to fputs is a constant and is one character
5085 long, we attempt to transform this call into __builtin_fputc(). */
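/* Illustrative sketch (not from the original source):

     fputs ("\n", stream)    ==>   fputc ('\n', stream)

   the actual rewriting is performed by fold_builtin_fputs below.  */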
5087 static rtx
5088 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5090 /* Verify the arguments in the original call. */
5091 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5093 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5094 CALL_EXPR_ARG (exp, 1),
5095 (target == const0_rtx),
5096 unlocked, NULL_TREE);
5097 if (result)
5098 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5100 return NULL_RTX;
5103 /* Expand a call to __builtin_expect. We just return our argument
5104 as the builtin_expect semantic should've been already executed by
5105 tree branch prediction pass. */
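/* Illustrative sketch (not from the original source): in

     if (__builtin_expect (x == 0, 0))
       unlikely_path ();

   (unlikely_path being an arbitrary example call) the hint only influences
   the earlier branch-prediction pass; by the time we get here the call
   simply evaluates to its first argument.  */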
5107 static rtx
5108 expand_builtin_expect (tree exp, rtx target)
5110 tree arg, c;
5112 if (call_expr_nargs (exp) < 2)
5113 return const0_rtx;
5114 arg = CALL_EXPR_ARG (exp, 0);
5115 c = CALL_EXPR_ARG (exp, 1);
5117 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5118 /* When guessing was done, the hints should be already stripped away. */
5119 gcc_assert (!flag_guess_branch_prob
5120 || optimize == 0 || errorcount || sorrycount);
5121 return target;
5124 void
5125 expand_builtin_trap (void)
5127 #ifdef HAVE_trap
5128 if (HAVE_trap)
5129 emit_insn (gen_trap ());
5130 else
5131 #endif
5132 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5133 emit_barrier ();
5136 /* Expand EXP, a call to fabs, fabsf or fabsl.
5137 Return NULL_RTX if a normal call should be emitted rather than expanding
5138 the function inline. If convenient, the result should be placed
5139 in TARGET. SUBTARGET may be used as the target for computing
5140 the operand. */
5142 static rtx
5143 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5145 enum machine_mode mode;
5146 tree arg;
5147 rtx op0;
5149 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5150 return NULL_RTX;
5152 arg = CALL_EXPR_ARG (exp, 0);
5153 mode = TYPE_MODE (TREE_TYPE (arg));
5154 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5155 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5158 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5159 Return NULL if a normal call should be emitted rather than expanding the
5160 function inline. If convenient, the result should be placed in TARGET.
5161 SUBTARGET may be used as the target for computing the operand. */
5163 static rtx
5164 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5166 rtx op0, op1;
5167 tree arg;
5169 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5170 return NULL_RTX;
5172 arg = CALL_EXPR_ARG (exp, 0);
5173 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5175 arg = CALL_EXPR_ARG (exp, 1);
5176 op1 = expand_normal (arg);
5178 return expand_copysign (op0, op1, target);
5181 /* Create a new constant string literal and return a char* pointer to it.
5182 The STRING_CST value is the LEN characters at STR. */
5183 tree
5184 build_string_literal (int len, const char *str)
5186 tree t, elem, index, type;
5188 t = build_string (len, str);
5189 elem = build_type_variant (char_type_node, 1, 0);
5190 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5191 type = build_array_type (elem, index);
5192 TREE_TYPE (t) = type;
5193 TREE_CONSTANT (t) = 1;
5194 TREE_INVARIANT (t) = 1;
5195 TREE_READONLY (t) = 1;
5196 TREE_STATIC (t) = 1;
5198 type = build_pointer_type (type);
5199 t = build1 (ADDR_EXPR, type, t);
5201 type = build_pointer_type (elem);
5202 t = build1 (NOP_EXPR, type, t);
5203 return t;
5206 /* Expand EXP, a call to printf or printf_unlocked.
5207 Return NULL_RTX if a normal call should be emitted rather than transforming
5208 the function inline. If convenient, the result should be placed in
5209 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5210 call. */
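/* Illustrative sketches (not from the original source) of the rewrites
   attempted below:

     printf ("%s\n", s)    ==>   puts (s)
     printf ("%c", c)      ==>   putchar (c)
     printf ("hello\n")    ==>   puts ("hello")
     printf ("x")          ==>   putchar ('x')

   and printf ("") is dropped entirely.  */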
5211 static rtx
5212 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5213 bool unlocked)
5215 /* If we're using an unlocked function, assume the other unlocked
5216 functions exist explicitly. */
5217 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5218 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5219 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5220 : implicit_built_in_decls[BUILT_IN_PUTS];
5221 const char *fmt_str;
5222 tree fn = 0;
5223 tree fmt, arg;
5224 int nargs = call_expr_nargs (exp);
5226 /* If the return value is used, don't do the transformation. */
5227 if (target != const0_rtx)
5228 return NULL_RTX;
5230 /* Verify the required arguments in the original call. */
5231 if (nargs == 0)
5232 return NULL_RTX;
5233 fmt = CALL_EXPR_ARG (exp, 0);
5234 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5235 return NULL_RTX;
5237 /* Check whether the format is a literal string constant. */
5238 fmt_str = c_getstr (fmt);
5239 if (fmt_str == NULL)
5240 return NULL_RTX;
5242 if (!init_target_chars ())
5243 return NULL_RTX;
5245 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5246 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5248 if ((nargs != 2)
5249 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5250 return NULL_RTX;
5251 if (fn_puts)
5252 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5254 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5255 else if (strcmp (fmt_str, target_percent_c) == 0)
5257 if ((nargs != 2)
5258 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5259 return NULL_RTX;
5260 if (fn_putchar)
5261 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5263 else
5265 /* We can't handle anything else with % args or %% ... yet. */
5266 if (strchr (fmt_str, target_percent))
5267 return NULL_RTX;
5269 if (nargs > 1)
5270 return NULL_RTX;
5272 /* If the format specifier was "", printf does nothing. */
5273 if (fmt_str[0] == '\0')
5274 return const0_rtx;
5275 /* If the format specifier has length of 1, call putchar. */
5276 if (fmt_str[1] == '\0')
5278 /* Given printf("c") (where c is any one character),
5279 convert "c"[0] to an int and pass that to the replacement
5280 function. */
5281 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5282 if (fn_putchar)
5283 fn = build_call_expr (fn_putchar, 1, arg);
5285 else
5287 /* If the format specifier was "string\n", call puts("string"). */
5288 size_t len = strlen (fmt_str);
5289 if ((unsigned char)fmt_str[len - 1] == target_newline)
5291 /* Create a NUL-terminated string that's one char shorter
5292 than the original, stripping off the trailing '\n'. */
5293 char *newstr = alloca (len);
5294 memcpy (newstr, fmt_str, len - 1);
5295 newstr[len - 1] = 0;
5296 arg = build_string_literal (len, newstr);
5297 if (fn_puts)
5298 fn = build_call_expr (fn_puts, 1, arg);
5300 else
5301 /* We'd like to arrange to call fputs(string,stdout) here,
5302 but we need stdout and don't have a way to get it yet. */
5303 return NULL_RTX;
5307 if (!fn)
5308 return NULL_RTX;
5309 if (TREE_CODE (fn) == CALL_EXPR)
5310 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5311 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5314 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5315 Return NULL_RTX if a normal call should be emitted rather than transforming
5316 the function inline. If convenient, the result should be placed in
5317 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5318 call. */
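/* For illustration, the transformations performed below are roughly:

     fprintf (fp, "%s", s);    ->  fputs (s, fp);
     fprintf (fp, "%c", c);    ->  fputc (c, fp);
     fprintf (fp, "message");  ->  fputs ("message", fp);
     fprintf (fp, "");         ->  FP evaluated for side effects only

   again only when the return value is not used and the format contains
   no other % directive.  */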
5319 static rtx
5320 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5321 bool unlocked)
5323 /* If we're using an unlocked function, assume the other unlocked
5324 functions exist explicitly. */
5325 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5326 : implicit_built_in_decls[BUILT_IN_FPUTC];
5327 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5328 : implicit_built_in_decls[BUILT_IN_FPUTS];
5329 const char *fmt_str;
5330 tree fn = 0;
5331 tree fmt, fp, arg;
5332 int nargs = call_expr_nargs (exp);
5334 /* If the return value is used, don't do the transformation. */
5335 if (target != const0_rtx)
5336 return NULL_RTX;
5338 /* Verify the required arguments in the original call. */
5339 if (nargs < 2)
5340 return NULL_RTX;
5341 fp = CALL_EXPR_ARG (exp, 0);
5342 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5343 return NULL_RTX;
5344 fmt = CALL_EXPR_ARG (exp, 1);
5345 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5346 return NULL_RTX;
5348 /* Check whether the format is a literal string constant. */
5349 fmt_str = c_getstr (fmt);
5350 if (fmt_str == NULL)
5351 return NULL_RTX;
5353 if (!init_target_chars ())
5354 return NULL_RTX;
5356 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5357 if (strcmp (fmt_str, target_percent_s) == 0)
5359 if ((nargs != 3)
5360 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5361 return NULL_RTX;
5362 arg = CALL_EXPR_ARG (exp, 2);
5363 if (fn_fputs)
5364 fn = build_call_expr (fn_fputs, 2, arg, fp);
5366 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5367 else if (strcmp (fmt_str, target_percent_c) == 0)
5369 if ((nargs != 3)
5370 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5371 return NULL_RTX;
5372 arg = CALL_EXPR_ARG (exp, 2);
5373 if (fn_fputc)
5374 fn = build_call_expr (fn_fputc, 2, arg, fp);
5376 else
5378 /* We can't handle anything else with % args or %% ... yet. */
5379 if (strchr (fmt_str, target_percent))
5380 return NULL_RTX;
5382 if (nargs > 2)
5383 return NULL_RTX;
5385 /* If the format specifier was "", fprintf does nothing. */
5386 if (fmt_str[0] == '\0')
5388 /* Evaluate and ignore FILE* argument for side-effects. */
5389 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5390 return const0_rtx;
5393 /* When "string" doesn't contain %, replace all cases of
5394 fprintf(stream,string) with fputs(string,stream). The fputs
5395 builtin will take care of special cases like length == 1. */
5396 if (fn_fputs)
5397 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5400 if (!fn)
5401 return NULL_RTX;
5402 if (TREE_CODE (fn) == CALL_EXPR)
5403 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5404 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5407 /* Expand a call EXP to sprintf. Return NULL_RTX if
5408 a normal call should be emitted rather than expanding the function
5409 inline. If convenient, the result should be placed in TARGET with
5410 mode MODE. */
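/* For illustration, the transformations performed below are roughly:

     sprintf (d, "hello");   ->  strcpy (d, "hello");  value, if used, folds to 5
     sprintf (d, "%s", s);   ->  strcpy (d, s);        value available only when
                                                       strlen (s) is a
                                                       compile-time constant

   Any other % directive in the format defeats the transformation.  */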
5412 static rtx
5413 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5415 tree dest, fmt;
5416 const char *fmt_str;
5417 int nargs = call_expr_nargs (exp);
5419 /* Verify the required arguments in the original call. */
5420 if (nargs < 2)
5421 return NULL_RTX;
5422 dest = CALL_EXPR_ARG (exp, 0);
5423 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5424 return NULL_RTX;
5425 fmt = CALL_EXPR_ARG (exp, 1);
5426 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5427 return NULL_RTX;
5429 /* Check whether the format is a literal string constant. */
5430 fmt_str = c_getstr (fmt);
5431 if (fmt_str == NULL)
5432 return NULL_RTX;
5434 if (!init_target_chars ())
5435 return NULL_RTX;
5437 /* If the format doesn't contain % args or %%, use strcpy. */
5438 if (strchr (fmt_str, target_percent) == 0)
5440 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5441 tree exp;
5443 if ((nargs > 2) || ! fn)
5444 return NULL_RTX;
5445 expand_expr (build_call_expr (fn, 2, dest, fmt),
5446 const0_rtx, VOIDmode, EXPAND_NORMAL);
5447 if (target == const0_rtx)
5448 return const0_rtx;
5449 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5450 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5452 /* If the format is "%s", use strcpy if the result isn't used. */
5453 else if (strcmp (fmt_str, target_percent_s) == 0)
5455 tree fn, arg, len;
5456 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5458 if (! fn)
5459 return NULL_RTX;
5460 if (nargs != 3)
5461 return NULL_RTX;
5462 arg = CALL_EXPR_ARG (exp, 2);
5463 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5464 return NULL_RTX;
5466 if (target != const0_rtx)
5468 len = c_strlen (arg, 1);
5469 if (! len || TREE_CODE (len) != INTEGER_CST)
5470 return NULL_RTX;
5472 else
5473 len = NULL_TREE;
5475 expand_expr (build_call_expr (fn, 2, dest, arg),
5476 const0_rtx, VOIDmode, EXPAND_NORMAL);
5478 if (target == const0_rtx)
5479 return const0_rtx;
5480 return expand_expr (len, target, mode, EXPAND_NORMAL);
5483 return NULL_RTX;
5486 /* Expand a call to either the entry or exit function profiler. */
5488 static rtx
5489 expand_builtin_profile_func (bool exitp)
5491 rtx this, which;
5493 this = DECL_RTL (current_function_decl);
5494 gcc_assert (MEM_P (this));
5495 this = XEXP (this, 0);
5497 if (exitp)
5498 which = profile_function_exit_libfunc;
5499 else
5500 which = profile_function_entry_libfunc;
5502 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5503 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5505 Pmode);
5507 return const0_rtx;
5510 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
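/* E.g. with a TRAMPOLINE_ALIGNMENT of 64 bits this computes
   (tramp + 7) & -8, i.e. it rounds the address up to the next
   8-byte boundary.  */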
5512 static rtx
5513 round_trampoline_addr (rtx tramp)
5515 rtx temp, addend, mask;
5517 /* If we don't need too much alignment, we'll have been guaranteed
5518 proper alignment by get_trampoline_type. */
5519 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5520 return tramp;
5522 /* Round address up to desired boundary. */
5523 temp = gen_reg_rtx (Pmode);
5524 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5525 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5527 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5528 temp, 0, OPTAB_LIB_WIDEN);
5529 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5530 temp, 0, OPTAB_LIB_WIDEN);
5532 return tramp;
5535 static rtx
5536 expand_builtin_init_trampoline (tree exp)
5538 tree t_tramp, t_func, t_chain;
5539 rtx r_tramp, r_func, r_chain;
5540 #ifdef TRAMPOLINE_TEMPLATE
5541 rtx blktramp;
5542 #endif
5544 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5545 POINTER_TYPE, VOID_TYPE))
5546 return NULL_RTX;
5548 t_tramp = CALL_EXPR_ARG (exp, 0);
5549 t_func = CALL_EXPR_ARG (exp, 1);
5550 t_chain = CALL_EXPR_ARG (exp, 2);
5552 r_tramp = expand_normal (t_tramp);
5553 r_func = expand_normal (t_func);
5554 r_chain = expand_normal (t_chain);
5556 /* Generate insns to initialize the trampoline. */
5557 r_tramp = round_trampoline_addr (r_tramp);
5558 #ifdef TRAMPOLINE_TEMPLATE
5559 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5560 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5561 emit_block_move (blktramp, assemble_trampoline_template (),
5562 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5563 #endif
5564 trampolines_created = 1;
5565 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5567 return const0_rtx;
5570 static rtx
5571 expand_builtin_adjust_trampoline (tree exp)
5573 rtx tramp;
5575 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5576 return NULL_RTX;
5578 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5579 tramp = round_trampoline_addr (tramp);
5580 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5581 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5582 #endif
5584 return tramp;
5587 /* Expand a call to the built-in signbit, signbitf, signbitl, signbitd32,
5588 signbitd64, or signbitd128 function.
5589 Return NULL_RTX if a normal call should be emitted rather than expanding
5590 the function in-line. EXP is the expression that is a call to the builtin
5591 function; if convenient, the result should be placed in TARGET. */
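/* For instance, for IEEE double the sign is bit 63: on a 32-bit target
   we pick the word holding that bit (honoring FLOAT_WORDS_BIG_ENDIAN)
   and mask it with 0x80000000, while on a 64-bit target, where bit 63
   lies outside the 32-bit result mode, we shift right by 63 and mask
   with 1.  Either way the result is nonzero iff the sign bit is set.  */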
5593 static rtx
5594 expand_builtin_signbit (tree exp, rtx target)
5596 const struct real_format *fmt;
5597 enum machine_mode fmode, imode, rmode;
5598 HOST_WIDE_INT hi, lo;
5599 tree arg;
5600 int word, bitpos;
5601 rtx temp;
5603 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5604 return NULL_RTX;
5606 arg = CALL_EXPR_ARG (exp, 0);
5607 fmode = TYPE_MODE (TREE_TYPE (arg));
5608 rmode = TYPE_MODE (TREE_TYPE (exp));
5609 fmt = REAL_MODE_FORMAT (fmode);
5611 /* For floating point formats without a sign bit, implement signbit
5612 as "ARG < 0.0". */
5613 bitpos = fmt->signbit_ro;
5614 if (bitpos < 0)
5616 /* But we can't do this if the format supports signed zero. */
5617 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5618 return NULL_RTX;
5620 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5621 build_real (TREE_TYPE (arg), dconst0));
5622 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5625 temp = expand_normal (arg);
5626 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5628 imode = int_mode_for_mode (fmode);
5629 if (imode == BLKmode)
5630 return NULL_RTX;
5631 temp = gen_lowpart (imode, temp);
5633 else
5635 imode = word_mode;
5636 /* Handle targets with different FP word orders. */
5637 if (FLOAT_WORDS_BIG_ENDIAN)
5638 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5639 else
5640 word = bitpos / BITS_PER_WORD;
5641 temp = operand_subword_force (temp, word, fmode);
5642 bitpos = bitpos % BITS_PER_WORD;
5645 /* Force the intermediate word_mode (or narrower) result into a
5646 register. This avoids attempting to create paradoxical SUBREGs
5647 of floating point modes below. */
5648 temp = force_reg (imode, temp);
5650 /* If the bitpos is within the "result mode" lowpart, the operation
5651 can be implemented with a single bitwise AND. Otherwise, we need
5652 a right shift and an AND. */
5654 if (bitpos < GET_MODE_BITSIZE (rmode))
5656 if (bitpos < HOST_BITS_PER_WIDE_INT)
5658 hi = 0;
5659 lo = (HOST_WIDE_INT) 1 << bitpos;
5661 else
5663 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5664 lo = 0;
5667 if (imode != rmode)
5668 temp = gen_lowpart (rmode, temp);
5669 temp = expand_binop (rmode, and_optab, temp,
5670 immed_double_const (lo, hi, rmode),
5671 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5673 else
5675 /* Perform a logical right shift to place the signbit in the least
5676 significant bit, then truncate the result to the desired mode
5677 and mask just this bit. */
5678 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5679 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5680 temp = gen_lowpart (rmode, temp);
5681 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5682 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5685 return temp;
5688 /* Expand fork or exec calls. TARGET is the desired target of the
5689 call. EXP is the call. FN is the
5690 identifier of the actual function. IGNORE is nonzero if the
5691 value is to be ignored. */
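/* For instance, when compiling with -fprofile-arcs a call to fork ()
   is emitted as a call to __gcov_fork (), and execl (...) as
   __gcov_execl (...), giving the profiling runtime a chance to keep
   the coverage counters consistent across the primitive.  */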
5693 static rtx
5694 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5696 tree id, decl;
5697 tree call;
5699 /* If we are not profiling, just call the function. */
5700 if (!profile_arc_flag)
5701 return NULL_RTX;
5703 /* Otherwise call the wrapper. This should be equivalent for the rest of the
5704 compiler, so the code does not diverge, and the wrapper may run the
5705 code necessary for keeping the profiling sane. */
5707 switch (DECL_FUNCTION_CODE (fn))
5709 case BUILT_IN_FORK:
5710 id = get_identifier ("__gcov_fork");
5711 break;
5713 case BUILT_IN_EXECL:
5714 id = get_identifier ("__gcov_execl");
5715 break;
5717 case BUILT_IN_EXECV:
5718 id = get_identifier ("__gcov_execv");
5719 break;
5721 case BUILT_IN_EXECLP:
5722 id = get_identifier ("__gcov_execlp");
5723 break;
5725 case BUILT_IN_EXECLE:
5726 id = get_identifier ("__gcov_execle");
5727 break;
5729 case BUILT_IN_EXECVP:
5730 id = get_identifier ("__gcov_execvp");
5731 break;
5733 case BUILT_IN_EXECVE:
5734 id = get_identifier ("__gcov_execve");
5735 break;
5737 default:
5738 gcc_unreachable ();
5741 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5742 DECL_EXTERNAL (decl) = 1;
5743 TREE_PUBLIC (decl) = 1;
5744 DECL_ARTIFICIAL (decl) = 1;
5745 TREE_NOTHROW (decl) = 1;
5746 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5747 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5748 call = rewrite_call_expr (exp, 0, decl, 0);
5749 return expand_call (call, target, ignore);
5754 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5755 the pointer in these functions is void*, the tree optimizers may remove
5756 casts. The mode computed in expand_builtin isn't reliable either, due
5757 to __sync_bool_compare_and_swap.
5759 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5760 group of builtins. This gives us log2 of the mode size. */
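/* E.g. BUILT_IN_FETCH_AND_ADD_4 - BUILT_IN_FETCH_AND_ADD_1 is 2,
   so we ask for an 8 << 2 == 32 bit integer mode (SImode on most
   targets).  */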
5762 static inline enum machine_mode
5763 get_builtin_sync_mode (int fcode_diff)
5765 /* The size is not negotiable, so ask not to get BLKmode in return
5766 if the target indicates that a smaller size would be better. */
5767 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5770 /* Expand the memory expression LOC and return the appropriate memory operand
5771 for the builtin_sync operations. */
5773 static rtx
5774 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5776 rtx addr, mem;
5778 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5780 /* Note that we explicitly do not want any alias information for this
5781 memory, so that we kill all other live memories. Otherwise we don't
5782 satisfy the full barrier semantics of the intrinsic. */
5783 mem = validize_mem (gen_rtx_MEM (mode, addr));
5785 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5786 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5787 MEM_VOLATILE_P (mem) = 1;
5789 return mem;
5792 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5793 EXP is the CALL_EXPR. CODE is the rtx code
5794 that corresponds to the arithmetic or logical operation from the name;
5795 an exception here is that NOT actually means NAND. TARGET is an optional
5796 place for us to store the results; AFTER is true if this is the
5797 fetch_and_xxx form. IGNORE is true if we don't actually care about
5798 the result of the operation at all. */
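/* In other words, for a memory location M and value V:

     __sync_fetch_and_add (&M, V)   returns the old M, then does M += V
     __sync_add_and_fetch (&M, V)   does M += V, then returns the new M

   (atomically, and likewise for sub/or/and/xor/nand).  When the result
   is ignored both forms reduce to the same atomic read-modify-write.  */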
5800 static rtx
5801 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5802 enum rtx_code code, bool after,
5803 rtx target, bool ignore)
5805 rtx val, mem;
5806 enum machine_mode old_mode;
5808 /* Expand the operands. */
5809 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5811 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5812 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5813 of CONST_INTs, where we know the old_mode only from the call argument. */
5814 old_mode = GET_MODE (val);
5815 if (old_mode == VOIDmode)
5816 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5817 val = convert_modes (mode, old_mode, val, 1);
5819 if (ignore)
5820 return expand_sync_operation (mem, val, code);
5821 else
5822 return expand_sync_fetch_operation (mem, val, code, after, target);
5825 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5826 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5827 true if this is the boolean form. TARGET is a place for us to store the
5828 results; this is NOT optional if IS_BOOL is true. */
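/* That is, both forms atomically perform

     if (*PTR == OLD_VAL) *PTR = NEW_VAL;

   __sync_val_compare_and_swap returns the previous contents of *PTR,
   while __sync_bool_compare_and_swap returns whether the store took
   place.  */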
5830 static rtx
5831 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5832 bool is_bool, rtx target)
5834 rtx old_val, new_val, mem;
5835 enum machine_mode old_mode;
5837 /* Expand the operands. */
5838 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5841 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5842 mode, EXPAND_NORMAL);
5843 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5844 of CONST_INTs, where we know the old_mode only from the call argument. */
5845 old_mode = GET_MODE (old_val);
5846 if (old_mode == VOIDmode)
5847 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5848 old_val = convert_modes (mode, old_mode, old_val, 1);
5850 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5851 mode, EXPAND_NORMAL);
5852 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5853 of CONST_INTs, where we know the old_mode only from the call argument. */
5854 old_mode = GET_MODE (new_val);
5855 if (old_mode == VOIDmode)
5856 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5857 new_val = convert_modes (mode, old_mode, new_val, 1);
5859 if (is_bool)
5860 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5861 else
5862 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5865 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5866 general form is actually an atomic exchange, and some targets only
5867 support a reduced form with the second argument being a constant 1.
5868 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5869 the results. */
5871 static rtx
5872 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5873 rtx target)
5875 rtx val, mem;
5876 enum machine_mode old_mode;
5878 /* Expand the operands. */
5879 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5880 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5881 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5882 of CONST_INTs, where we know the old_mode only from the call argument. */
5883 old_mode = GET_MODE (val);
5884 if (old_mode == VOIDmode)
5885 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5886 val = convert_modes (mode, old_mode, val, 1);
5888 return expand_sync_lock_test_and_set (mem, val, target);
5891 /* Expand the __sync_synchronize intrinsic. */
5893 static void
5894 expand_builtin_synchronize (void)
5896 tree x;
5898 #ifdef HAVE_memory_barrier
5899 if (HAVE_memory_barrier)
5901 emit_insn (gen_memory_barrier ());
5902 return;
5904 #endif
5906 /* If no explicit memory barrier instruction is available, create an
5907 empty asm stmt with a memory clobber. */
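/* This amounts to the user-level
     __asm__ __volatile__ ("" : : : "memory");
   which only prevents the compiler from moving memory accesses across
   this point; no hardware barrier instruction is emitted.  */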
5908 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
5909 tree_cons (NULL, build_string (6, "memory"), NULL));
5910 ASM_VOLATILE_P (x) = 1;
5911 expand_asm_expr (x);
5914 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5916 static void
5917 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5919 enum insn_code icode;
5920 rtx mem, insn;
5921 rtx val = const0_rtx;
5923 /* Expand the operands. */
5924 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5926 /* If there is an explicit operation in the md file, use it. */
5927 icode = sync_lock_release[mode];
5928 if (icode != CODE_FOR_nothing)
5930 if (!insn_data[icode].operand[1].predicate (val, mode))
5931 val = force_reg (mode, val);
5933 insn = GEN_FCN (icode) (mem, val);
5934 if (insn)
5936 emit_insn (insn);
5937 return;
5941 /* Otherwise we can implement this operation by emitting a barrier
5942 followed by a store of zero. */
5943 expand_builtin_synchronize ();
5944 emit_move_insn (mem, val);
5947 /* Expand an expression EXP that calls a built-in function,
5948 with result going to TARGET if that's convenient
5949 (and in mode MODE if that's convenient).
5950 SUBTARGET may be used as the target for computing one of EXP's operands.
5951 IGNORE is nonzero if the value is to be ignored. */
5953 rtx
5954 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5955 int ignore)
5957 tree fndecl = get_callee_fndecl (exp);
5958 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5959 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5961 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5962 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5964 /* When not optimizing, generate calls to library functions for a certain
5965 set of builtins. */
5966 if (!optimize
5967 && !called_as_built_in (fndecl)
5968 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5969 && fcode != BUILT_IN_ALLOCA)
5970 return expand_call (exp, target, ignore);
5972 /* The built-in function expanders test for target == const0_rtx
5973 to determine whether the function's result will be ignored. */
5974 if (ignore)
5975 target = const0_rtx;
5977 /* If the result of a pure or const built-in function is ignored, and
5978 none of its arguments are volatile, we can avoid expanding the
5979 built-in call and just evaluate the arguments for side-effects. */
5980 if (target == const0_rtx
5981 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
5983 bool volatilep = false;
5984 tree arg;
5985 call_expr_arg_iterator iter;
5987 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5988 if (TREE_THIS_VOLATILE (arg))
5990 volatilep = true;
5991 break;
5994 if (! volatilep)
5996 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5997 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5998 return const0_rtx;
6002 switch (fcode)
6004 CASE_FLT_FN (BUILT_IN_FABS):
6005 target = expand_builtin_fabs (exp, target, subtarget);
6006 if (target)
6007 return target;
6008 break;
6010 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6011 target = expand_builtin_copysign (exp, target, subtarget);
6012 if (target)
6013 return target;
6014 break;
6016 /* Just do a normal library call if we were unable to fold
6017 the values. */
6018 CASE_FLT_FN (BUILT_IN_CABS):
6019 break;
6021 CASE_FLT_FN (BUILT_IN_EXP):
6022 CASE_FLT_FN (BUILT_IN_EXP10):
6023 CASE_FLT_FN (BUILT_IN_POW10):
6024 CASE_FLT_FN (BUILT_IN_EXP2):
6025 CASE_FLT_FN (BUILT_IN_EXPM1):
6026 CASE_FLT_FN (BUILT_IN_LOGB):
6027 CASE_FLT_FN (BUILT_IN_LOG):
6028 CASE_FLT_FN (BUILT_IN_LOG10):
6029 CASE_FLT_FN (BUILT_IN_LOG2):
6030 CASE_FLT_FN (BUILT_IN_LOG1P):
6031 CASE_FLT_FN (BUILT_IN_TAN):
6032 CASE_FLT_FN (BUILT_IN_ASIN):
6033 CASE_FLT_FN (BUILT_IN_ACOS):
6034 CASE_FLT_FN (BUILT_IN_ATAN):
6035 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6036 because of possible accuracy problems. */
6037 if (! flag_unsafe_math_optimizations)
6038 break;
6039 CASE_FLT_FN (BUILT_IN_SQRT):
6040 CASE_FLT_FN (BUILT_IN_FLOOR):
6041 CASE_FLT_FN (BUILT_IN_CEIL):
6042 CASE_FLT_FN (BUILT_IN_TRUNC):
6043 CASE_FLT_FN (BUILT_IN_ROUND):
6044 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6045 CASE_FLT_FN (BUILT_IN_RINT):
6046 target = expand_builtin_mathfn (exp, target, subtarget);
6047 if (target)
6048 return target;
6049 break;
6051 CASE_FLT_FN (BUILT_IN_ILOGB):
6052 if (! flag_unsafe_math_optimizations)
6053 break;
6054 CASE_FLT_FN (BUILT_IN_ISINF):
6055 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6056 if (target)
6057 return target;
6058 break;
6060 CASE_FLT_FN (BUILT_IN_LCEIL):
6061 CASE_FLT_FN (BUILT_IN_LLCEIL):
6062 CASE_FLT_FN (BUILT_IN_LFLOOR):
6063 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6064 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6065 if (target)
6066 return target;
6067 break;
6069 CASE_FLT_FN (BUILT_IN_LRINT):
6070 CASE_FLT_FN (BUILT_IN_LLRINT):
6071 CASE_FLT_FN (BUILT_IN_LROUND):
6072 CASE_FLT_FN (BUILT_IN_LLROUND):
6073 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6074 if (target)
6075 return target;
6076 break;
6078 CASE_FLT_FN (BUILT_IN_POW):
6079 target = expand_builtin_pow (exp, target, subtarget);
6080 if (target)
6081 return target;
6082 break;
6084 CASE_FLT_FN (BUILT_IN_POWI):
6085 target = expand_builtin_powi (exp, target, subtarget);
6086 if (target)
6087 return target;
6088 break;
6090 CASE_FLT_FN (BUILT_IN_ATAN2):
6091 CASE_FLT_FN (BUILT_IN_LDEXP):
6092 CASE_FLT_FN (BUILT_IN_SCALB):
6093 CASE_FLT_FN (BUILT_IN_SCALBN):
6094 CASE_FLT_FN (BUILT_IN_SCALBLN):
6095 if (! flag_unsafe_math_optimizations)
6096 break;
6098 CASE_FLT_FN (BUILT_IN_FMOD):
6099 CASE_FLT_FN (BUILT_IN_REMAINDER):
6100 CASE_FLT_FN (BUILT_IN_DREM):
6101 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6102 if (target)
6103 return target;
6104 break;
6106 CASE_FLT_FN (BUILT_IN_CEXPI):
6107 target = expand_builtin_cexpi (exp, target, subtarget);
6108 gcc_assert (target);
6109 return target;
6111 CASE_FLT_FN (BUILT_IN_SIN):
6112 CASE_FLT_FN (BUILT_IN_COS):
6113 if (! flag_unsafe_math_optimizations)
6114 break;
6115 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6116 if (target)
6117 return target;
6118 break;
6120 CASE_FLT_FN (BUILT_IN_SINCOS):
6121 if (! flag_unsafe_math_optimizations)
6122 break;
6123 target = expand_builtin_sincos (exp);
6124 if (target)
6125 return target;
6126 break;
6128 case BUILT_IN_APPLY_ARGS:
6129 return expand_builtin_apply_args ();
6131 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6132 FUNCTION with a copy of the parameters described by
6133 ARGUMENTS, and ARGSIZE. It returns a block of memory
6134 allocated on the stack into which is stored all the registers
6135 that might possibly be used for returning the result of a
6136 function. ARGUMENTS is the value returned by
6137 __builtin_apply_args. ARGSIZE is the number of bytes of
6138 arguments that must be copied. ??? How should this value be
6139 computed? We'll also need a safe worst case value for varargs
6140 functions. */
6141 case BUILT_IN_APPLY:
6142 if (!validate_arglist (exp, POINTER_TYPE,
6143 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6144 && !validate_arglist (exp, REFERENCE_TYPE,
6145 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6146 return const0_rtx;
6147 else
6149 rtx ops[3];
6151 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6152 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6153 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6155 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6158 /* __builtin_return (RESULT) causes the function to return the
6159 value described by RESULT. RESULT is address of the block of
6160 memory returned by __builtin_apply. */
6161 case BUILT_IN_RETURN:
6162 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6163 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6164 return const0_rtx;
6166 case BUILT_IN_SAVEREGS:
6167 return expand_builtin_saveregs ();
6169 case BUILT_IN_ARGS_INFO:
6170 return expand_builtin_args_info (exp);
6172 /* Return the address of the first anonymous stack arg. */
6173 case BUILT_IN_NEXT_ARG:
6174 if (fold_builtin_next_arg (exp, false))
6175 return const0_rtx;
6176 return expand_builtin_next_arg ();
6178 case BUILT_IN_CLASSIFY_TYPE:
6179 return expand_builtin_classify_type (exp);
6181 case BUILT_IN_CONSTANT_P:
6182 return const0_rtx;
6184 case BUILT_IN_FRAME_ADDRESS:
6185 case BUILT_IN_RETURN_ADDRESS:
6186 return expand_builtin_frame_address (fndecl, exp);
6188 /* Returns the address of the area where the structure is returned.
6189 0 otherwise. */
6190 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6191 if (call_expr_nargs (exp) != 0
6192 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6193 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6194 return const0_rtx;
6195 else
6196 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6198 case BUILT_IN_ALLOCA:
6199 target = expand_builtin_alloca (exp, target);
6200 if (target)
6201 return target;
6202 break;
6204 case BUILT_IN_STACK_SAVE:
6205 return expand_stack_save ();
6207 case BUILT_IN_STACK_RESTORE:
6208 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6209 return const0_rtx;
6211 case BUILT_IN_BSWAP32:
6212 case BUILT_IN_BSWAP64:
6213 target = expand_builtin_bswap (exp, target, subtarget);
6215 if (target)
6216 return target;
6217 break;
6219 CASE_INT_FN (BUILT_IN_FFS):
6220 case BUILT_IN_FFSIMAX:
6221 target = expand_builtin_unop (target_mode, exp, target,
6222 subtarget, ffs_optab);
6223 if (target)
6224 return target;
6225 break;
6227 CASE_INT_FN (BUILT_IN_CLZ):
6228 case BUILT_IN_CLZIMAX:
6229 target = expand_builtin_unop (target_mode, exp, target,
6230 subtarget, clz_optab);
6231 if (target)
6232 return target;
6233 break;
6235 CASE_INT_FN (BUILT_IN_CTZ):
6236 case BUILT_IN_CTZIMAX:
6237 target = expand_builtin_unop (target_mode, exp, target,
6238 subtarget, ctz_optab);
6239 if (target)
6240 return target;
6241 break;
6243 CASE_INT_FN (BUILT_IN_POPCOUNT):
6244 case BUILT_IN_POPCOUNTIMAX:
6245 target = expand_builtin_unop (target_mode, exp, target,
6246 subtarget, popcount_optab);
6247 if (target)
6248 return target;
6249 break;
6251 CASE_INT_FN (BUILT_IN_PARITY):
6252 case BUILT_IN_PARITYIMAX:
6253 target = expand_builtin_unop (target_mode, exp, target,
6254 subtarget, parity_optab);
6255 if (target)
6256 return target;
6257 break;
6259 case BUILT_IN_STRLEN:
6260 target = expand_builtin_strlen (exp, target, target_mode);
6261 if (target)
6262 return target;
6263 break;
6265 case BUILT_IN_STRCPY:
6266 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6267 if (target)
6268 return target;
6269 break;
6271 case BUILT_IN_STRNCPY:
6272 target = expand_builtin_strncpy (exp, target, mode);
6273 if (target)
6274 return target;
6275 break;
6277 case BUILT_IN_STPCPY:
6278 target = expand_builtin_stpcpy (exp, target, mode);
6279 if (target)
6280 return target;
6281 break;
6283 case BUILT_IN_STRCAT:
6284 target = expand_builtin_strcat (fndecl, exp, target, mode);
6285 if (target)
6286 return target;
6287 break;
6289 case BUILT_IN_STRNCAT:
6290 target = expand_builtin_strncat (exp, target, mode);
6291 if (target)
6292 return target;
6293 break;
6295 case BUILT_IN_STRSPN:
6296 target = expand_builtin_strspn (exp, target, mode);
6297 if (target)
6298 return target;
6299 break;
6301 case BUILT_IN_STRCSPN:
6302 target = expand_builtin_strcspn (exp, target, mode);
6303 if (target)
6304 return target;
6305 break;
6307 case BUILT_IN_STRSTR:
6308 target = expand_builtin_strstr (exp, target, mode);
6309 if (target)
6310 return target;
6311 break;
6313 case BUILT_IN_STRPBRK:
6314 target = expand_builtin_strpbrk (exp, target, mode);
6315 if (target)
6316 return target;
6317 break;
6319 case BUILT_IN_INDEX:
6320 case BUILT_IN_STRCHR:
6321 target = expand_builtin_strchr (exp, target, mode);
6322 if (target)
6323 return target;
6324 break;
6326 case BUILT_IN_RINDEX:
6327 case BUILT_IN_STRRCHR:
6328 target = expand_builtin_strrchr (exp, target, mode);
6329 if (target)
6330 return target;
6331 break;
6333 case BUILT_IN_MEMCPY:
6334 target = expand_builtin_memcpy (exp, target, mode);
6335 if (target)
6336 return target;
6337 break;
6339 case BUILT_IN_MEMPCPY:
6340 target = expand_builtin_mempcpy (exp, target, mode);
6341 if (target)
6342 return target;
6343 break;
6345 case BUILT_IN_MEMMOVE:
6346 target = expand_builtin_memmove (exp, target, mode, ignore);
6347 if (target)
6348 return target;
6349 break;
6351 case BUILT_IN_BCOPY:
6352 target = expand_builtin_bcopy (exp, ignore);
6353 if (target)
6354 return target;
6355 break;
6357 case BUILT_IN_MEMSET:
6358 target = expand_builtin_memset (exp, target, mode);
6359 if (target)
6360 return target;
6361 break;
6363 case BUILT_IN_BZERO:
6364 target = expand_builtin_bzero (exp);
6365 if (target)
6366 return target;
6367 break;
6369 case BUILT_IN_STRCMP:
6370 target = expand_builtin_strcmp (exp, target, mode);
6371 if (target)
6372 return target;
6373 break;
6375 case BUILT_IN_STRNCMP:
6376 target = expand_builtin_strncmp (exp, target, mode);
6377 if (target)
6378 return target;
6379 break;
6381 case BUILT_IN_MEMCHR:
6382 target = expand_builtin_memchr (exp, target, mode);
6383 if (target)
6384 return target;
6385 break;
6387 case BUILT_IN_BCMP:
6388 case BUILT_IN_MEMCMP:
6389 target = expand_builtin_memcmp (exp, target, mode);
6390 if (target)
6391 return target;
6392 break;
6394 case BUILT_IN_SETJMP:
6395 /* This should have been lowered to the builtins below. */
6396 gcc_unreachable ();
6398 case BUILT_IN_SETJMP_SETUP:
6399 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6400 and the receiver label. */
6401 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6403 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6404 VOIDmode, EXPAND_NORMAL);
6405 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6406 rtx label_r = label_rtx (label);
6408 /* This is copied from the handling of non-local gotos. */
6409 expand_builtin_setjmp_setup (buf_addr, label_r);
6410 nonlocal_goto_handler_labels
6411 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6412 nonlocal_goto_handler_labels);
6413 /* ??? Do not let expand_label treat us as such since we would
6414 not want to be both on the list of non-local labels and on
6415 the list of forced labels. */
6416 FORCED_LABEL (label) = 0;
6417 return const0_rtx;
6419 break;
6421 case BUILT_IN_SETJMP_DISPATCHER:
6422 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6423 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6425 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6426 rtx label_r = label_rtx (label);
6428 /* Remove the dispatcher label from the list of non-local labels
6429 since the receiver labels have been added to it above. */
6430 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6431 return const0_rtx;
6433 break;
6435 case BUILT_IN_SETJMP_RECEIVER:
6436 /* __builtin_setjmp_receiver is passed the receiver label. */
6437 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6439 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6440 rtx label_r = label_rtx (label);
6442 expand_builtin_setjmp_receiver (label_r);
6443 return const0_rtx;
6445 break;
6447 /* __builtin_longjmp is passed a pointer to an array of five words.
6448 It's similar to the C library longjmp function but works with
6449 __builtin_setjmp above. */
6450 case BUILT_IN_LONGJMP:
6451 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6453 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6454 VOIDmode, EXPAND_NORMAL);
6455 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6457 if (value != const1_rtx)
6459 error ("%<__builtin_longjmp%> second argument must be 1");
6460 return const0_rtx;
6463 expand_builtin_longjmp (buf_addr, value);
6464 return const0_rtx;
6466 break;
6468 case BUILT_IN_NONLOCAL_GOTO:
6469 target = expand_builtin_nonlocal_goto (exp);
6470 if (target)
6471 return target;
6472 break;
6474 /* This updates the setjmp buffer that is its argument with the value
6475 of the current stack pointer. */
6476 case BUILT_IN_UPDATE_SETJMP_BUF:
6477 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6479 rtx buf_addr
6480 = expand_normal (CALL_EXPR_ARG (exp, 0));
6482 expand_builtin_update_setjmp_buf (buf_addr);
6483 return const0_rtx;
6485 break;
6487 case BUILT_IN_TRAP:
6488 expand_builtin_trap ();
6489 return const0_rtx;
6491 case BUILT_IN_PRINTF:
6492 target = expand_builtin_printf (exp, target, mode, false);
6493 if (target)
6494 return target;
6495 break;
6497 case BUILT_IN_PRINTF_UNLOCKED:
6498 target = expand_builtin_printf (exp, target, mode, true);
6499 if (target)
6500 return target;
6501 break;
6503 case BUILT_IN_FPUTS:
6504 target = expand_builtin_fputs (exp, target, false);
6505 if (target)
6506 return target;
6507 break;
6508 case BUILT_IN_FPUTS_UNLOCKED:
6509 target = expand_builtin_fputs (exp, target, true);
6510 if (target)
6511 return target;
6512 break;
6514 case BUILT_IN_FPRINTF:
6515 target = expand_builtin_fprintf (exp, target, mode, false);
6516 if (target)
6517 return target;
6518 break;
6520 case BUILT_IN_FPRINTF_UNLOCKED:
6521 target = expand_builtin_fprintf (exp, target, mode, true);
6522 if (target)
6523 return target;
6524 break;
6526 case BUILT_IN_SPRINTF:
6527 target = expand_builtin_sprintf (exp, target, mode);
6528 if (target)
6529 return target;
6530 break;
6532 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6533 case BUILT_IN_SIGNBITD32:
6534 case BUILT_IN_SIGNBITD64:
6535 case BUILT_IN_SIGNBITD128:
6536 target = expand_builtin_signbit (exp, target);
6537 if (target)
6538 return target;
6539 break;
6541 /* Various hooks for the DWARF 2 __throw routine. */
6542 case BUILT_IN_UNWIND_INIT:
6543 expand_builtin_unwind_init ();
6544 return const0_rtx;
6545 case BUILT_IN_DWARF_CFA:
6546 return virtual_cfa_rtx;
6547 #ifdef DWARF2_UNWIND_INFO
6548 case BUILT_IN_DWARF_SP_COLUMN:
6549 return expand_builtin_dwarf_sp_column ();
6550 case BUILT_IN_INIT_DWARF_REG_SIZES:
6551 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6552 return const0_rtx;
6553 #endif
6554 case BUILT_IN_FROB_RETURN_ADDR:
6555 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6556 case BUILT_IN_EXTRACT_RETURN_ADDR:
6557 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6558 case BUILT_IN_EH_RETURN:
6559 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6560 CALL_EXPR_ARG (exp, 1));
6561 return const0_rtx;
6562 #ifdef EH_RETURN_DATA_REGNO
6563 case BUILT_IN_EH_RETURN_DATA_REGNO:
6564 return expand_builtin_eh_return_data_regno (exp);
6565 #endif
6566 case BUILT_IN_EXTEND_POINTER:
6567 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6569 case BUILT_IN_VA_START:
6570 case BUILT_IN_STDARG_START:
6571 return expand_builtin_va_start (exp);
6572 case BUILT_IN_VA_END:
6573 return expand_builtin_va_end (exp);
6574 case BUILT_IN_VA_COPY:
6575 return expand_builtin_va_copy (exp);
6576 case BUILT_IN_EXPECT:
6577 return expand_builtin_expect (exp, target);
6578 case BUILT_IN_PREFETCH:
6579 expand_builtin_prefetch (exp);
6580 return const0_rtx;
6582 case BUILT_IN_PROFILE_FUNC_ENTER:
6583 return expand_builtin_profile_func (false);
6584 case BUILT_IN_PROFILE_FUNC_EXIT:
6585 return expand_builtin_profile_func (true);
6587 case BUILT_IN_INIT_TRAMPOLINE:
6588 return expand_builtin_init_trampoline (exp);
6589 case BUILT_IN_ADJUST_TRAMPOLINE:
6590 return expand_builtin_adjust_trampoline (exp);
6592 case BUILT_IN_FORK:
6593 case BUILT_IN_EXECL:
6594 case BUILT_IN_EXECV:
6595 case BUILT_IN_EXECLP:
6596 case BUILT_IN_EXECLE:
6597 case BUILT_IN_EXECVP:
6598 case BUILT_IN_EXECVE:
6599 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6600 if (target)
6601 return target;
6602 break;
6604 case BUILT_IN_FETCH_AND_ADD_1:
6605 case BUILT_IN_FETCH_AND_ADD_2:
6606 case BUILT_IN_FETCH_AND_ADD_4:
6607 case BUILT_IN_FETCH_AND_ADD_8:
6608 case BUILT_IN_FETCH_AND_ADD_16:
6609 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6610 target = expand_builtin_sync_operation (mode, exp, PLUS,
6611 false, target, ignore);
6612 if (target)
6613 return target;
6614 break;
6616 case BUILT_IN_FETCH_AND_SUB_1:
6617 case BUILT_IN_FETCH_AND_SUB_2:
6618 case BUILT_IN_FETCH_AND_SUB_4:
6619 case BUILT_IN_FETCH_AND_SUB_8:
6620 case BUILT_IN_FETCH_AND_SUB_16:
6621 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6622 target = expand_builtin_sync_operation (mode, exp, MINUS,
6623 false, target, ignore);
6624 if (target)
6625 return target;
6626 break;
6628 case BUILT_IN_FETCH_AND_OR_1:
6629 case BUILT_IN_FETCH_AND_OR_2:
6630 case BUILT_IN_FETCH_AND_OR_4:
6631 case BUILT_IN_FETCH_AND_OR_8:
6632 case BUILT_IN_FETCH_AND_OR_16:
6633 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6634 target = expand_builtin_sync_operation (mode, exp, IOR,
6635 false, target, ignore);
6636 if (target)
6637 return target;
6638 break;
6640 case BUILT_IN_FETCH_AND_AND_1:
6641 case BUILT_IN_FETCH_AND_AND_2:
6642 case BUILT_IN_FETCH_AND_AND_4:
6643 case BUILT_IN_FETCH_AND_AND_8:
6644 case BUILT_IN_FETCH_AND_AND_16:
6645 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6646 target = expand_builtin_sync_operation (mode, exp, AND,
6647 false, target, ignore);
6648 if (target)
6649 return target;
6650 break;
6652 case BUILT_IN_FETCH_AND_XOR_1:
6653 case BUILT_IN_FETCH_AND_XOR_2:
6654 case BUILT_IN_FETCH_AND_XOR_4:
6655 case BUILT_IN_FETCH_AND_XOR_8:
6656 case BUILT_IN_FETCH_AND_XOR_16:
6657 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6658 target = expand_builtin_sync_operation (mode, exp, XOR,
6659 false, target, ignore);
6660 if (target)
6661 return target;
6662 break;
6664 case BUILT_IN_FETCH_AND_NAND_1:
6665 case BUILT_IN_FETCH_AND_NAND_2:
6666 case BUILT_IN_FETCH_AND_NAND_4:
6667 case BUILT_IN_FETCH_AND_NAND_8:
6668 case BUILT_IN_FETCH_AND_NAND_16:
6669 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6670 target = expand_builtin_sync_operation (mode, exp, NOT,
6671 false, target, ignore);
6672 if (target)
6673 return target;
6674 break;
6676 case BUILT_IN_ADD_AND_FETCH_1:
6677 case BUILT_IN_ADD_AND_FETCH_2:
6678 case BUILT_IN_ADD_AND_FETCH_4:
6679 case BUILT_IN_ADD_AND_FETCH_8:
6680 case BUILT_IN_ADD_AND_FETCH_16:
6681 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6682 target = expand_builtin_sync_operation (mode, exp, PLUS,
6683 true, target, ignore);
6684 if (target)
6685 return target;
6686 break;
6688 case BUILT_IN_SUB_AND_FETCH_1:
6689 case BUILT_IN_SUB_AND_FETCH_2:
6690 case BUILT_IN_SUB_AND_FETCH_4:
6691 case BUILT_IN_SUB_AND_FETCH_8:
6692 case BUILT_IN_SUB_AND_FETCH_16:
6693 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6694 target = expand_builtin_sync_operation (mode, exp, MINUS,
6695 true, target, ignore);
6696 if (target)
6697 return target;
6698 break;
6700 case BUILT_IN_OR_AND_FETCH_1:
6701 case BUILT_IN_OR_AND_FETCH_2:
6702 case BUILT_IN_OR_AND_FETCH_4:
6703 case BUILT_IN_OR_AND_FETCH_8:
6704 case BUILT_IN_OR_AND_FETCH_16:
6705 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6706 target = expand_builtin_sync_operation (mode, exp, IOR,
6707 true, target, ignore);
6708 if (target)
6709 return target;
6710 break;
6712 case BUILT_IN_AND_AND_FETCH_1:
6713 case BUILT_IN_AND_AND_FETCH_2:
6714 case BUILT_IN_AND_AND_FETCH_4:
6715 case BUILT_IN_AND_AND_FETCH_8:
6716 case BUILT_IN_AND_AND_FETCH_16:
6717 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6718 target = expand_builtin_sync_operation (mode, exp, AND,
6719 true, target, ignore);
6720 if (target)
6721 return target;
6722 break;
6724 case BUILT_IN_XOR_AND_FETCH_1:
6725 case BUILT_IN_XOR_AND_FETCH_2:
6726 case BUILT_IN_XOR_AND_FETCH_4:
6727 case BUILT_IN_XOR_AND_FETCH_8:
6728 case BUILT_IN_XOR_AND_FETCH_16:
6729 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6730 target = expand_builtin_sync_operation (mode, exp, XOR,
6731 true, target, ignore);
6732 if (target)
6733 return target;
6734 break;
6736 case BUILT_IN_NAND_AND_FETCH_1:
6737 case BUILT_IN_NAND_AND_FETCH_2:
6738 case BUILT_IN_NAND_AND_FETCH_4:
6739 case BUILT_IN_NAND_AND_FETCH_8:
6740 case BUILT_IN_NAND_AND_FETCH_16:
6741 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6742 target = expand_builtin_sync_operation (mode, exp, NOT,
6743 true, target, ignore);
6744 if (target)
6745 return target;
6746 break;
6748 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6749 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6750 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6751 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6752 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6753 if (mode == VOIDmode)
6754 mode = TYPE_MODE (boolean_type_node);
6755 if (!target || !register_operand (target, mode))
6756 target = gen_reg_rtx (mode);
6758 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6759 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6760 if (target)
6761 return target;
6762 break;
6764 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6765 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6766 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6767 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6768 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6769 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6770 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6771 if (target)
6772 return target;
6773 break;
6775 case BUILT_IN_LOCK_TEST_AND_SET_1:
6776 case BUILT_IN_LOCK_TEST_AND_SET_2:
6777 case BUILT_IN_LOCK_TEST_AND_SET_4:
6778 case BUILT_IN_LOCK_TEST_AND_SET_8:
6779 case BUILT_IN_LOCK_TEST_AND_SET_16:
6780 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6781 target = expand_builtin_lock_test_and_set (mode, exp, target);
6782 if (target)
6783 return target;
6784 break;
6786 case BUILT_IN_LOCK_RELEASE_1:
6787 case BUILT_IN_LOCK_RELEASE_2:
6788 case BUILT_IN_LOCK_RELEASE_4:
6789 case BUILT_IN_LOCK_RELEASE_8:
6790 case BUILT_IN_LOCK_RELEASE_16:
6791 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6792 expand_builtin_lock_release (mode, exp);
6793 return const0_rtx;
6795 case BUILT_IN_SYNCHRONIZE:
6796 expand_builtin_synchronize ();
6797 return const0_rtx;
6799 case BUILT_IN_OBJECT_SIZE:
6800 return expand_builtin_object_size (exp);
6802 case BUILT_IN_MEMCPY_CHK:
6803 case BUILT_IN_MEMPCPY_CHK:
6804 case BUILT_IN_MEMMOVE_CHK:
6805 case BUILT_IN_MEMSET_CHK:
6806 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6807 if (target)
6808 return target;
6809 break;
6811 case BUILT_IN_STRCPY_CHK:
6812 case BUILT_IN_STPCPY_CHK:
6813 case BUILT_IN_STRNCPY_CHK:
6814 case BUILT_IN_STRCAT_CHK:
6815 case BUILT_IN_STRNCAT_CHK:
6816 case BUILT_IN_SNPRINTF_CHK:
6817 case BUILT_IN_VSNPRINTF_CHK:
6818 maybe_emit_chk_warning (exp, fcode);
6819 break;
6821 case BUILT_IN_SPRINTF_CHK:
6822 case BUILT_IN_VSPRINTF_CHK:
6823 maybe_emit_sprintf_chk_warning (exp, fcode);
6824 break;
6826 default: /* just do library call, if unknown builtin */
6827 break;
6830 /* The switch statement above can drop through to cause the function
6831 to be called normally. */
6832 return expand_call (exp, target, ignore);
6835 /* Determine whether a tree node represents a call to a built-in
6836 function. If the tree T is a call to a built-in function with
6837 the right number of arguments of the appropriate types, return
6838 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6839 Otherwise the return value is END_BUILTINS. */
6841 enum built_in_function
6842 builtin_mathfn_code (tree t)
6844 tree fndecl, arg, parmlist;
6845 tree argtype, parmtype;
6846 call_expr_arg_iterator iter;
6848 if (TREE_CODE (t) != CALL_EXPR
6849 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6850 return END_BUILTINS;
6852 fndecl = get_callee_fndecl (t);
6853 if (fndecl == NULL_TREE
6854 || TREE_CODE (fndecl) != FUNCTION_DECL
6855 || ! DECL_BUILT_IN (fndecl)
6856 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6857 return END_BUILTINS;
6859 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6860 init_call_expr_arg_iterator (t, &iter);
6861 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6863 /* If a function doesn't take a variable number of arguments,
6864 the last element in the list will have type `void'. */
6865 parmtype = TREE_VALUE (parmlist);
6866 if (VOID_TYPE_P (parmtype))
6868 if (more_call_expr_args_p (&iter))
6869 return END_BUILTINS;
6870 return DECL_FUNCTION_CODE (fndecl);
6873 if (! more_call_expr_args_p (&iter))
6874 return END_BUILTINS;
6876 arg = next_call_expr_arg (&iter);
6877 argtype = TREE_TYPE (arg);
6879 if (SCALAR_FLOAT_TYPE_P (parmtype))
6881 if (! SCALAR_FLOAT_TYPE_P (argtype))
6882 return END_BUILTINS;
6884 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6886 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6887 return END_BUILTINS;
6889 else if (POINTER_TYPE_P (parmtype))
6891 if (! POINTER_TYPE_P (argtype))
6892 return END_BUILTINS;
6894 else if (INTEGRAL_TYPE_P (parmtype))
6896 if (! INTEGRAL_TYPE_P (argtype))
6897 return END_BUILTINS;
6899 else
6900 return END_BUILTINS;
6903 /* Variable-length argument list. */
6904 return DECL_FUNCTION_CODE (fndecl);
6907 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6908 evaluate to a constant. */
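/* For example:

     __builtin_constant_p (3)       folds to 1
     __builtin_constant_p ("abc")   folds to 1
     __builtin_constant_p (x++)     folds to 0 (it has side effects)

   Anything else folds to 0 only when no later optimization could still
   prove it constant (pointer or aggregate argument, no function
   context, or inside an initializer); otherwise the call is kept for
   later passes.  */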
6910 static tree
6911 fold_builtin_constant_p (tree arg)
6913 /* We return 1 for a numeric type that's known to be a constant
6914 value at compile-time or for an aggregate type that's a
6915 literal constant. */
6916 STRIP_NOPS (arg);
6918 /* If we know this is a constant, emit the constant of one. */
6919 if (CONSTANT_CLASS_P (arg)
6920 || (TREE_CODE (arg) == CONSTRUCTOR
6921 && TREE_CONSTANT (arg)))
6922 return integer_one_node;
6923 if (TREE_CODE (arg) == ADDR_EXPR)
6925 tree op = TREE_OPERAND (arg, 0);
6926 if (TREE_CODE (op) == STRING_CST
6927 || (TREE_CODE (op) == ARRAY_REF
6928 && integer_zerop (TREE_OPERAND (op, 1))
6929 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6930 return integer_one_node;
6933 /* If this expression has side effects, show we don't know it to be a
6934 constant. Likewise if it's a pointer or aggregate type since in
6935 those cases we only want literals, since those are only optimized
6936 when generating RTL, not later.
6937 And finally, if we are compiling an initializer, not code, we
6938 need to return a definite result now; there's not going to be any
6939 more optimization done. */
6940 if (TREE_SIDE_EFFECTS (arg)
6941 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6942 || POINTER_TYPE_P (TREE_TYPE (arg))
6943 || cfun == 0
6944 || folding_initializer)
6945 return integer_zero_node;
6947 return NULL_TREE;
6950 /* Fold a call to __builtin_expect with argument ARG, if we expect that a
6951 comparison against the argument will fold to a constant. In practice,
6952 this means a true constant or the address of a non-weak symbol. */
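/* E.g. __builtin_expect (3, 1) simply folds to 3, since the hint adds
   nothing for a value that is already known.  If the argument is the
   address of a weak symbol we do not fold: the address is formally
   invariant, yet its value is not known until link time.  */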
6954 static tree
6955 fold_builtin_expect (tree arg)
6957 tree inner;
6959 /* If the argument isn't invariant, then there's nothing we can do. */
6960 if (!TREE_INVARIANT (arg))
6961 return NULL_TREE;
6963 /* If we're looking at an address of a weak decl, then do not fold. */
6964 inner = arg;
6965 STRIP_NOPS (inner);
6966 if (TREE_CODE (inner) == ADDR_EXPR)
6970 inner = TREE_OPERAND (inner, 0);
6972 while (TREE_CODE (inner) == COMPONENT_REF
6973 || TREE_CODE (inner) == ARRAY_REF);
6974 if (DECL_P (inner) && DECL_WEAK (inner))
6975 return NULL_TREE;
6978 /* Otherwise, ARG already has the proper type for the return value. */
6979 return arg;
6982 /* Fold a call to __builtin_classify_type with argument ARG. */
6984 static tree
6985 fold_builtin_classify_type (tree arg)
6987 if (arg == 0)
6988 return build_int_cst (NULL_TREE, no_type_class);
6990 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6993 /* Fold a call to __builtin_strlen with argument ARG. */
6995 static tree
6996 fold_builtin_strlen (tree arg)
6998 if (!validate_arg (arg, POINTER_TYPE))
6999 return NULL_TREE;
7000 else
7002 tree len = c_strlen (arg, 0);
7004 if (len)
7006 /* Convert from the internal "sizetype" type to "size_t". */
7007 if (size_type_node)
7008 len = fold_convert (size_type_node, len);
7009 return len;
7012 return NULL_TREE;
7016 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7018 static tree
7019 fold_builtin_inf (tree type, int warn)
7021 REAL_VALUE_TYPE real;
7023 /* __builtin_inff is intended to be usable to define INFINITY on all
7024 targets. If an infinity is not available, INFINITY expands "to a
7025 positive constant of type float that overflows at translation
7026 time", footnote "In this case, using INFINITY will violate the
7027 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7028 Thus we pedwarn to ensure this constraint violation is
7029 diagnosed. */
7030 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7031 pedwarn ("target format does not support infinity");
7033 real_inf (&real);
7034 return build_real (type, real);
7037 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7039 static tree
7040 fold_builtin_nan (tree arg, tree type, int quiet)
7042 REAL_VALUE_TYPE real;
7043 const char *str;
7045 if (!validate_arg (arg, POINTER_TYPE))
7046 return NULL_TREE;
7047 str = c_getstr (arg);
7048 if (!str)
7049 return NULL_TREE;
7051 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7052 return NULL_TREE;
7054 return build_real (type, real);
7057 /* Return true if the floating point expression T has an integer value.
7058 We also allow +Inf, -Inf and NaN to be considered integer values. */
7060 static bool
7061 integer_valued_real_p (tree t)
7063 switch (TREE_CODE (t))
7065 case FLOAT_EXPR:
7066 return true;
7068 case ABS_EXPR:
7069 case SAVE_EXPR:
7070 case NON_LVALUE_EXPR:
7071 return integer_valued_real_p (TREE_OPERAND (t, 0));
7073 case COMPOUND_EXPR:
7074 case MODIFY_EXPR:
7075 case BIND_EXPR:
7076 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7078 case PLUS_EXPR:
7079 case MINUS_EXPR:
7080 case MULT_EXPR:
7081 case MIN_EXPR:
7082 case MAX_EXPR:
7083 return integer_valued_real_p (TREE_OPERAND (t, 0))
7084 && integer_valued_real_p (TREE_OPERAND (t, 1));
7086 case COND_EXPR:
7087 return integer_valued_real_p (TREE_OPERAND (t, 1))
7088 && integer_valued_real_p (TREE_OPERAND (t, 2));
7090 case REAL_CST:
7091 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7093 case NOP_EXPR:
7095 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7096 if (TREE_CODE (type) == INTEGER_TYPE)
7097 return true;
7098 if (TREE_CODE (type) == REAL_TYPE)
7099 return integer_valued_real_p (TREE_OPERAND (t, 0));
7100 break;
7103 case CALL_EXPR:
7104 switch (builtin_mathfn_code (t))
7106 CASE_FLT_FN (BUILT_IN_CEIL):
7107 CASE_FLT_FN (BUILT_IN_FLOOR):
7108 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7109 CASE_FLT_FN (BUILT_IN_RINT):
7110 CASE_FLT_FN (BUILT_IN_ROUND):
7111 CASE_FLT_FN (BUILT_IN_TRUNC):
7112 return true;
7114 CASE_FLT_FN (BUILT_IN_FMIN):
7115 CASE_FLT_FN (BUILT_IN_FMAX):
7116 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7117 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7119 default:
7120 break;
7122 break;
7124 default:
7125 break;
7127 return false;
7130 /* FNDECL is assumed to be a builtin where truncation can be propagated
7131 across (for instance floor((double)f) == (double)floorf (f)).
7132 Do the transformation for a call with argument ARG. */
7134 static tree
7135 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7137 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7139 if (!validate_arg (arg, REAL_TYPE))
7140 return NULL_TREE;
7142 /* Integer rounding functions are idempotent. */
7143 if (fcode == builtin_mathfn_code (arg))
7144 return arg;
7146 /* If argument is already integer valued, and we don't need to worry
7147 about setting errno, there's no need to perform rounding. */
7148 if (! flag_errno_math && integer_valued_real_p (arg))
7149 return arg;
7151 if (optimize)
7153 tree arg0 = strip_float_extensions (arg);
7154 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7155 tree newtype = TREE_TYPE (arg0);
7156 tree decl;
7158 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7159 && (decl = mathfn_built_in (newtype, fcode)))
7160 return fold_convert (ftype,
7161 build_call_expr (decl, 1,
7162 fold_convert (newtype, arg0)));
7164 return NULL_TREE;
7167 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7168 the argument, for instance lround((double)f) -> lroundf (f).
7169 Do the transformation for a call with argument ARG. */
7171 static tree
7172 fold_fixed_mathfn (tree fndecl, tree arg)
7174 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7176 if (!validate_arg (arg, REAL_TYPE))
7177 return NULL_TREE;
7179 /* If argument is already integer valued, and we don't need to worry
7180 about setting errno, there's no need to perform rounding. */
7181 if (! flag_errno_math && integer_valued_real_p (arg))
7182 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7184 if (optimize)
7186 tree ftype = TREE_TYPE (arg);
7187 tree arg0 = strip_float_extensions (arg);
7188 tree newtype = TREE_TYPE (arg0);
7189 tree decl;
7191 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7192 && (decl = mathfn_built_in (newtype, fcode)))
7193 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7196 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7197 sizeof (long long) == sizeof (long). */
7198 if (TYPE_PRECISION (long_long_integer_type_node)
7199 == TYPE_PRECISION (long_integer_type_node))
7201 tree newfn = NULL_TREE;
7202 switch (fcode)
7204 CASE_FLT_FN (BUILT_IN_LLCEIL):
7205 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7206 break;
7208 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7209 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7210 break;
7212 CASE_FLT_FN (BUILT_IN_LLROUND):
7213 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7214 break;
7216 CASE_FLT_FN (BUILT_IN_LLRINT):
7217 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7218 break;
7220 default:
7221 break;
7224 if (newfn)
7226 tree newcall = build_call_expr(newfn, 1, arg);
7227 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7231 return NULL_TREE;
7234 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7235 return type. Return NULL_TREE if no simplification can be made. */
7237 static tree
7238 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7240 tree res;
7242 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7243 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7244 return NULL_TREE;
7246 /* Calculate the result when the argument is a constant. */
7247 if (TREE_CODE (arg) == COMPLEX_CST
7248 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7249 type, mpfr_hypot)))
7250 return res;
7252 if (TREE_CODE (arg) == COMPLEX_EXPR)
7254 tree real = TREE_OPERAND (arg, 0);
7255 tree imag = TREE_OPERAND (arg, 1);
7257 /* If either part is zero, cabs is fabs of the other. */
7258 if (real_zerop (real))
7259 return fold_build1 (ABS_EXPR, type, imag);
7260 if (real_zerop (imag))
7261 return fold_build1 (ABS_EXPR, type, real);
7263 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7264 if (flag_unsafe_math_optimizations
7265 && operand_equal_p (real, imag, OEP_PURE_SAME))
7267 const REAL_VALUE_TYPE sqrt2_trunc
7268 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
7269 STRIP_NOPS (real);
7270 return fold_build2 (MULT_EXPR, type,
7271 fold_build1 (ABS_EXPR, type, real),
7272 build_real (type, sqrt2_trunc));
7276 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7277 if (TREE_CODE (arg) == NEGATE_EXPR
7278 || TREE_CODE (arg) == CONJ_EXPR)
7279 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7281 /* Don't do this when optimizing for size. */
7282 if (flag_unsafe_math_optimizations
7283 && optimize && !optimize_size)
7285 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7287 if (sqrtfn != NULL_TREE)
7289 tree rpart, ipart, result;
7291 arg = builtin_save_expr (arg);
7293 rpart = fold_build1 (REALPART_EXPR, type, arg);
7294 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7296 rpart = builtin_save_expr (rpart);
7297 ipart = builtin_save_expr (ipart);
7299 result = fold_build2 (PLUS_EXPR, type,
7300 fold_build2 (MULT_EXPR, type,
7301 rpart, rpart),
7302 fold_build2 (MULT_EXPR, type,
7303 ipart, ipart));
7305 return build_call_expr (sqrtfn, 1, result);
7309 return NULL_TREE;
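/* A minimal standalone sketch (helper name made up) of the open-coded
   expansion above: when optimizing, with unsafe math optimizations and
   not optimizing for size, cabs (z) becomes sqrt (r*r + i*i) on the
   saved real and imaginary parts.  */
#include <complex.h>
#include <math.h>
double
expanded_cabs (double _Complex z)
{
  double r = creal (z);
  double i = cimag (z);
  return sqrt (r * r + i * i);
}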
7312 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7313 Return NULL_TREE if no simplification can be made. */
7315 static tree
7316 fold_builtin_sqrt (tree arg, tree type)
7319 enum built_in_function fcode;
7320 tree res;
7322 if (!validate_arg (arg, REAL_TYPE))
7323 return NULL_TREE;
7325 /* Calculate the result when the argument is a constant. */
7326 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7327 return res;
7329 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7330 fcode = builtin_mathfn_code (arg);
7331 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7333 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7334 arg = fold_build2 (MULT_EXPR, type,
7335 CALL_EXPR_ARG (arg, 0),
7336 build_real (type, dconsthalf));
7337 return build_call_expr (expfn, 1, arg);
7340 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7341 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7343 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7345 if (powfn)
7347 tree arg0 = CALL_EXPR_ARG (arg, 0);
7348 tree tree_root;
7349 /* The inner root was either sqrt or cbrt. */
7350 REAL_VALUE_TYPE dconstroot =
7351 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;
7353 /* Adjust for the outer root. */
7354 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7355 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7356 tree_root = build_real (type, dconstroot);
7357 return build_call_expr (powfn, 2, arg0, tree_root);
7361 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7362 if (flag_unsafe_math_optimizations
7363 && (fcode == BUILT_IN_POW
7364 || fcode == BUILT_IN_POWF
7365 || fcode == BUILT_IN_POWL))
7367 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7368 tree arg0 = CALL_EXPR_ARG (arg, 0);
7369 tree arg1 = CALL_EXPR_ARG (arg, 1);
7370 tree narg1;
7371 if (!tree_expr_nonnegative_p (arg0))
7372 arg0 = build1 (ABS_EXPR, type, arg0);
7373 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7374 build_real (type, dconsthalf));
7375 return build_call_expr (powfn, 2, arg0, narg1);
7378 return NULL_TREE;
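/* A minimal standalone sketch (helper name made up) of the last case
   above: under -funsafe-math-optimizations, sqrt (pow (x, y)) is
   rewritten as pow (fabs (x), y * 0.5); the fabs is omitted when x is
   known to be nonnegative.  */
#include <math.h>
double
sqrt_of_pow (double x, double y)
{
  return pow (fabs (x), y * 0.5);
}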
7381 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7382 Return NULL_TREE if no simplification can be made. */
7384 static tree
7385 fold_builtin_cbrt (tree arg, tree type)
7387 const enum built_in_function fcode = builtin_mathfn_code (arg);
7388 tree res;
7390 if (!validate_arg (arg, REAL_TYPE))
7391 return NULL_TREE;
7393 /* Calculate the result when the argument is a constant. */
7394 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7395 return res;
7397 if (flag_unsafe_math_optimizations)
7399 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7400 if (BUILTIN_EXPONENT_P (fcode))
7402 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7403 const REAL_VALUE_TYPE third_trunc =
7404 real_value_truncate (TYPE_MODE (type), dconstthird);
7405 arg = fold_build2 (MULT_EXPR, type,
7406 CALL_EXPR_ARG (arg, 0),
7407 build_real (type, third_trunc));
7408 return build_call_expr (expfn, 1, arg);
7411 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7412 if (BUILTIN_SQRT_P (fcode))
7414 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7416 if (powfn)
7418 tree arg0 = CALL_EXPR_ARG (arg, 0);
7419 tree tree_root;
7420 REAL_VALUE_TYPE dconstroot = dconstthird;
7422 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7423 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7424 tree_root = build_real (type, dconstroot);
7425 return build_call_expr (powfn, 2, arg0, tree_root);
7429 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7430 if (BUILTIN_CBRT_P (fcode))
7432 tree arg0 = CALL_EXPR_ARG (arg, 0);
7433 if (tree_expr_nonnegative_p (arg0))
7435 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7437 if (powfn)
7439 tree tree_root;
7440 REAL_VALUE_TYPE dconstroot;
7442 real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
7443 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7444 tree_root = build_real (type, dconstroot);
7445 return build_call_expr (powfn, 2, arg0, tree_root);
7450 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7451 if (fcode == BUILT_IN_POW
7452 || fcode == BUILT_IN_POWF
7453 || fcode == BUILT_IN_POWL)
7455 tree arg00 = CALL_EXPR_ARG (arg, 0);
7456 tree arg01 = CALL_EXPR_ARG (arg, 1);
7457 if (tree_expr_nonnegative_p (arg00))
7459 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7460 const REAL_VALUE_TYPE dconstroot
7461 = real_value_truncate (TYPE_MODE (type), dconstthird);
7462 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7463 build_real (type, dconstroot));
7464 return build_call_expr (powfn, 2, arg00, narg01);
7468 return NULL_TREE;
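/* A minimal standalone sketch (helper name made up) of the nested-root
   case above: under -funsafe-math-optimizations and for nonnegative x,
   cbrt (cbrt (x)) folds to pow (x, 1/9), the two 1/3 exponents being
   multiplied together.  */
#include <math.h>
double
nested_cbrt (double x)  /* assumes x >= 0 */
{
  return pow (x, 1.0 / 9.0);
}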
7471 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7472 TYPE is the type of the return value. Return NULL_TREE if no
7473 simplification can be made. */
7475 static tree
7476 fold_builtin_cos (tree arg, tree type, tree fndecl)
7478 tree res, narg;
7480 if (!validate_arg (arg, REAL_TYPE))
7481 return NULL_TREE;
7483 /* Calculate the result when the argument is a constant. */
7484 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7485 return res;
7487 /* Optimize cos(-x) into cos (x). */
7488 if ((narg = fold_strip_sign_ops (arg)))
7489 return build_call_expr (fndecl, 1, narg);
7491 return NULL_TREE;
7494 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7495 Return NULL_TREE if no simplification can be made. */
7497 static tree
7498 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7500 if (validate_arg (arg, REAL_TYPE))
7502 tree res, narg;
7504 /* Calculate the result when the argument is a constant. */
7505 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7506 return res;
7508 /* Optimize cosh(-x) into cosh (x). */
7509 if ((narg = fold_strip_sign_ops (arg)))
7510 return build_call_expr (fndecl, 1, narg);
7513 return NULL_TREE;
7516 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7517 Return NULL_TREE if no simplification can be made. */
7519 static tree
7520 fold_builtin_tan (tree arg, tree type)
7522 enum built_in_function fcode;
7523 tree res;
7525 if (!validate_arg (arg, REAL_TYPE))
7526 return NULL_TREE;
7528 /* Calculate the result when the argument is a constant. */
7529 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7530 return res;
7532 /* Optimize tan(atan(x)) = x. */
7533 fcode = builtin_mathfn_code (arg);
7534 if (flag_unsafe_math_optimizations
7535 && (fcode == BUILT_IN_ATAN
7536 || fcode == BUILT_IN_ATANF
7537 || fcode == BUILT_IN_ATANL))
7538 return CALL_EXPR_ARG (arg, 0);
7540 return NULL_TREE;
7543 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7544 NULL_TREE if no simplification can be made. */
7546 static tree
7547 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7549 tree type;
7550 tree res, fn, call;
7552 if (!validate_arg (arg0, REAL_TYPE)
7553 || !validate_arg (arg1, POINTER_TYPE)
7554 || !validate_arg (arg2, POINTER_TYPE))
7555 return NULL_TREE;
7557 type = TREE_TYPE (arg0);
7559 /* Calculate the result when the argument is a constant. */
7560 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7561 return res;
7563 /* Canonicalize sincos to cexpi. */
7564 if (!TARGET_C99_FUNCTIONS)
7565 return NULL_TREE;
7566 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7567 if (!fn)
7568 return NULL_TREE;
7570 call = build_call_expr (fn, 1, arg0);
7571 call = builtin_save_expr (call);
7573 return build2 (COMPOUND_EXPR, type,
7574 build2 (MODIFY_EXPR, void_type_node,
7575 build_fold_indirect_ref (arg1),
7576 build1 (IMAGPART_EXPR, type, call)),
7577 build2 (MODIFY_EXPR, void_type_node,
7578 build_fold_indirect_ref (arg2),
7579 build1 (REALPART_EXPR, type, call)));
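/* A minimal standalone sketch of the canonicalization above.  Standard C
   has no cexpi, so the sketch (helper name made up) uses cexp (I*x),
   which computes the same value cos (x) + I*sin (x): the sine is the
   imaginary part, the cosine the real part.  */
#include <complex.h>
void
sincos_via_cexpi (double x, double *sinp, double *cosp)
{
  double _Complex t = cexp (I * x);
  *sinp = cimag (t);
  *cosp = creal (t);
}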
7582 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7583 NULL_TREE if no simplification can be made. */
7585 static tree
7586 fold_builtin_cexp (tree arg0, tree type)
7588 tree rtype;
7589 tree realp, imagp, ifn;
7591 if (!validate_arg (arg0, COMPLEX_TYPE))
7592 return NULL_TREE;
7594 rtype = TREE_TYPE (TREE_TYPE (arg0));
7596 /* If we can figure out the real part of arg0 and it is constant zero,
7597 fold to cexpi. */
7598 if (!TARGET_C99_FUNCTIONS)
7599 return NULL_TREE;
7600 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7601 if (!ifn)
7602 return NULL_TREE;
7604 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7605 && real_zerop (realp))
7607 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7608 return build_call_expr (ifn, 1, narg);
7611 /* If we can easily decompose the real and imaginary parts, split cexp
7612 into exp (r) * cexpi (i). */
7613 if (flag_unsafe_math_optimizations
7614 && realp)
7616 tree rfn, rcall, icall;
7618 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7619 if (!rfn)
7620 return NULL_TREE;
7622 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7623 if (!imagp)
7624 return NULL_TREE;
7626 icall = build_call_expr (ifn, 1, imagp);
7627 icall = builtin_save_expr (icall);
7628 rcall = build_call_expr (rfn, 1, realp);
7629 rcall = builtin_save_expr (rcall);
7630 return build2 (COMPLEX_EXPR, type,
7631 build2 (MULT_EXPR, rtype,
7632 rcall,
7633 build1 (REALPART_EXPR, rtype, icall)),
7634 build2 (MULT_EXPR, rtype,
7635 rcall,
7636 build1 (IMAGPART_EXPR, rtype, icall)));
7639 return NULL_TREE;
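/* A minimal standalone sketch (helper name made up) of the split above:
   with unsafe math optimizations, cexp (r + I*i) becomes
   exp (r) * cexpi (i).  cexp (I * i) stands in for cexpi (i) here since
   standard C lacks the latter.  */
#include <complex.h>
#include <math.h>
double _Complex
split_cexp (double r, double i)
{
  return exp (r) * cexp (I * i);
}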
7642 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7643 Return NULL_TREE if no simplification can be made. */
7645 static tree
7646 fold_builtin_trunc (tree fndecl, tree arg)
7648 if (!validate_arg (arg, REAL_TYPE))
7649 return NULL_TREE;
7651 /* Optimize trunc of constant value. */
7652 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7654 REAL_VALUE_TYPE r, x;
7655 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7657 x = TREE_REAL_CST (arg);
7658 real_trunc (&r, TYPE_MODE (type), &x);
7659 return build_real (type, r);
7662 return fold_trunc_transparent_mathfn (fndecl, arg);
7665 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7666 Return NULL_TREE if no simplification can be made. */
7668 static tree
7669 fold_builtin_floor (tree fndecl, tree arg)
7671 if (!validate_arg (arg, REAL_TYPE))
7672 return NULL_TREE;
7674 /* Optimize floor of constant value. */
7675 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7677 REAL_VALUE_TYPE x;
7679 x = TREE_REAL_CST (arg);
7680 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7682 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7683 REAL_VALUE_TYPE r;
7685 real_floor (&r, TYPE_MODE (type), &x);
7686 return build_real (type, r);
7690 /* Fold floor (x) where x is nonnegative to trunc (x). */
7691 if (tree_expr_nonnegative_p (arg))
7693 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7694 if (truncfn)
7695 return build_call_expr (truncfn, 1, arg);
7698 return fold_trunc_transparent_mathfn (fndecl, arg);
7701 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7702 Return NULL_TREE if no simplification can be made. */
7704 static tree
7705 fold_builtin_ceil (tree fndecl, tree arg)
7707 if (!validate_arg (arg, REAL_TYPE))
7708 return NULL_TREE;
7710 /* Optimize ceil of constant value. */
7711 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7713 REAL_VALUE_TYPE x;
7715 x = TREE_REAL_CST (arg);
7716 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7718 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7719 REAL_VALUE_TYPE r;
7721 real_ceil (&r, TYPE_MODE (type), &x);
7722 return build_real (type, r);
7726 return fold_trunc_transparent_mathfn (fndecl, arg);
7729 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7730 Return NULL_TREE if no simplification can be made. */
7732 static tree
7733 fold_builtin_round (tree fndecl, tree arg)
7735 if (!validate_arg (arg, REAL_TYPE))
7736 return NULL_TREE;
7738 /* Optimize round of constant value. */
7739 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7741 REAL_VALUE_TYPE x;
7743 x = TREE_REAL_CST (arg);
7744 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7746 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7747 REAL_VALUE_TYPE r;
7749 real_round (&r, TYPE_MODE (type), &x);
7750 return build_real (type, r);
7754 return fold_trunc_transparent_mathfn (fndecl, arg);
7757 /* Fold function call to builtin lround, lroundf or lroundl (or the
7758 corresponding long long versions) and other rounding functions. ARG
7759 is the argument to the call. Return NULL_TREE if no simplification
7760 can be made. */
7762 static tree
7763 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7765 if (!validate_arg (arg, REAL_TYPE))
7766 return NULL_TREE;
7768 /* Optimize lround of constant value. */
7769 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7771 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7773 if (real_isfinite (&x))
7775 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7776 tree ftype = TREE_TYPE (arg);
7777 unsigned HOST_WIDE_INT lo2;
7778 HOST_WIDE_INT hi, lo;
7779 REAL_VALUE_TYPE r;
7781 switch (DECL_FUNCTION_CODE (fndecl))
7783 CASE_FLT_FN (BUILT_IN_LFLOOR):
7784 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7785 real_floor (&r, TYPE_MODE (ftype), &x);
7786 break;
7788 CASE_FLT_FN (BUILT_IN_LCEIL):
7789 CASE_FLT_FN (BUILT_IN_LLCEIL):
7790 real_ceil (&r, TYPE_MODE (ftype), &x);
7791 break;
7793 CASE_FLT_FN (BUILT_IN_LROUND):
7794 CASE_FLT_FN (BUILT_IN_LLROUND):
7795 real_round (&r, TYPE_MODE (ftype), &x);
7796 break;
7798 default:
7799 gcc_unreachable ();
7802 REAL_VALUE_TO_INT (&lo, &hi, r);
7803 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7804 return build_int_cst_wide (itype, lo2, hi);
7808 switch (DECL_FUNCTION_CODE (fndecl))
7810 CASE_FLT_FN (BUILT_IN_LFLOOR):
7811 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7812 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7813 if (tree_expr_nonnegative_p (arg))
7814 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
7815 arg);
7816 break;
7817 default:;
7820 return fold_fixed_mathfn (fndecl, arg);
7823 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7824 and their long and long long variants (e.g. ffsl and ffsll). ARG is
7825 the argument to the call. Return NULL_TREE if no simplification can
7826 be made. */
7828 static tree
7829 fold_builtin_bitop (tree fndecl, tree arg)
7831 if (!validate_arg (arg, INTEGER_TYPE))
7832 return NULL_TREE;
7834 /* Optimize for constant argument. */
7835 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7837 HOST_WIDE_INT hi, width, result;
7838 unsigned HOST_WIDE_INT lo;
7839 tree type;
7841 type = TREE_TYPE (arg);
7842 width = TYPE_PRECISION (type);
7843 lo = TREE_INT_CST_LOW (arg);
7845 /* Clear all the bits that are beyond the type's precision. */
7846 if (width > HOST_BITS_PER_WIDE_INT)
7848 hi = TREE_INT_CST_HIGH (arg);
7849 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7850 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7852 else
7854 hi = 0;
7855 if (width < HOST_BITS_PER_WIDE_INT)
7856 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7859 switch (DECL_FUNCTION_CODE (fndecl))
7861 CASE_INT_FN (BUILT_IN_FFS):
7862 if (lo != 0)
7863 result = exact_log2 (lo & -lo) + 1;
7864 else if (hi != 0)
7865 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7866 else
7867 result = 0;
7868 break;
7870 CASE_INT_FN (BUILT_IN_CLZ):
7871 if (hi != 0)
7872 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7873 else if (lo != 0)
7874 result = width - floor_log2 (lo) - 1;
7875 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7876 result = width;
7877 break;
7879 CASE_INT_FN (BUILT_IN_CTZ):
7880 if (lo != 0)
7881 result = exact_log2 (lo & -lo);
7882 else if (hi != 0)
7883 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7884 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7885 result = width;
7886 break;
7888 CASE_INT_FN (BUILT_IN_POPCOUNT):
7889 result = 0;
7890 while (lo)
7891 result++, lo &= lo - 1;
7892 while (hi)
7893 result++, hi &= hi - 1;
7894 break;
7896 CASE_INT_FN (BUILT_IN_PARITY):
7897 result = 0;
7898 while (lo)
7899 result++, lo &= lo - 1;
7900 while (hi)
7901 result++, hi &= hi - 1;
7902 result &= 1;
7903 break;
7905 default:
7906 gcc_unreachable ();
7909 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7912 return NULL_TREE;
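/* A minimal standalone sketch (helper names made up) of the
   constant-folding loops above: v &= v - 1 clears the lowest set bit,
   so the number of iterations is the population count; parity is just
   the low bit of that count.  */
#include <stdint.h>
static int
popcount_u64 (uint64_t v)
{
  int count = 0;
  while (v)
    count++, v &= v - 1;
  return count;
}
static int
parity_u64 (uint64_t v)
{
  return popcount_u64 (v) & 1;
}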
7915 /* Fold a function call to builtin bswap32 or bswap64 with argument ARG.
7916 Return NULL_TREE if no simplification can be made. */
7917 static tree
7918 fold_builtin_bswap (tree fndecl, tree arg)
7920 if (! validate_arg (arg, INTEGER_TYPE))
7921 return NULL_TREE;
7923 /* Optimize constant value. */
7924 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7926 HOST_WIDE_INT hi, width, r_hi = 0;
7927 unsigned HOST_WIDE_INT lo, r_lo = 0;
7928 tree type;
7930 type = TREE_TYPE (arg);
7931 width = TYPE_PRECISION (type);
7932 lo = TREE_INT_CST_LOW (arg);
7933 hi = TREE_INT_CST_HIGH (arg);
7935 switch (DECL_FUNCTION_CODE (fndecl))
7937 case BUILT_IN_BSWAP32:
7938 case BUILT_IN_BSWAP64:
7940 int s;
7942 for (s = 0; s < width; s += 8)
7944 int d = width - s - 8;
7945 unsigned HOST_WIDE_INT byte;
7947 if (s < HOST_BITS_PER_WIDE_INT)
7948 byte = (lo >> s) & 0xff;
7949 else
7950 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7952 if (d < HOST_BITS_PER_WIDE_INT)
7953 r_lo |= byte << d;
7954 else
7955 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7959 break;
7961 default:
7962 gcc_unreachable ();
7965 if (width < HOST_BITS_PER_WIDE_INT)
7966 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7967 else
7968 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7971 return NULL_TREE;
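/* A minimal standalone sketch (helper name made up) of the byte-reversal
   loop above, specialized to 32 bits: byte s of the input is placed at
   bit position width - s - 8 of the result.  */
#include <stdint.h>
static uint32_t
bswap32_const (uint32_t x)
{
  uint32_t r = 0;
  int s;
  for (s = 0; s < 32; s += 8)
    r |= ((x >> s) & 0xff) << (32 - s - 8);
  return r;
}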
7974 /* Return true if EXPR is the real constant contained in VALUE. */
7976 static bool
7977 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
7979 STRIP_NOPS (expr);
7981 return ((TREE_CODE (expr) == REAL_CST
7982 && !TREE_OVERFLOW (expr)
7983 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
7984 || (TREE_CODE (expr) == COMPLEX_CST
7985 && real_dconstp (TREE_REALPART (expr), value)
7986 && real_zerop (TREE_IMAGPART (expr))));
7989 /* A subroutine of fold_builtin to fold the various logarithmic
7990 functions. Return NULL_TREE if no simplification can be made.
7991 FUNC is the corresponding MPFR logarithm function. */
7993 static tree
7994 fold_builtin_logarithm (tree fndecl, tree arg,
7995 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7997 if (validate_arg (arg, REAL_TYPE))
7999 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8000 tree res;
8001 const enum built_in_function fcode = builtin_mathfn_code (arg);
8003 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8004 instead we'll look for 'e' truncated to MODE. So only do
8005 this if flag_unsafe_math_optimizations is set. */
8006 if (flag_unsafe_math_optimizations && func == mpfr_log)
8008 const REAL_VALUE_TYPE e_truncated =
8009 real_value_truncate (TYPE_MODE (type), dconste);
8010 if (real_dconstp (arg, &e_truncated))
8011 return build_real (type, dconst1);
8014 /* Calculate the result when the argument is a constant. */
8015 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8016 return res;
8018 /* Special case, optimize logN(expN(x)) = x. */
8019 if (flag_unsafe_math_optimizations
8020 && ((func == mpfr_log
8021 && (fcode == BUILT_IN_EXP
8022 || fcode == BUILT_IN_EXPF
8023 || fcode == BUILT_IN_EXPL))
8024 || (func == mpfr_log2
8025 && (fcode == BUILT_IN_EXP2
8026 || fcode == BUILT_IN_EXP2F
8027 || fcode == BUILT_IN_EXP2L))
8028 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8029 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8031 /* Optimize logN(func()) for various exponential functions. We
8032 want to determine the value "x" and the power "exponent" in
8033 order to transform logN(x**exponent) into exponent*logN(x). */
8034 if (flag_unsafe_math_optimizations)
8036 tree exponent = 0, x = 0;
8038 switch (fcode)
8040 CASE_FLT_FN (BUILT_IN_EXP):
8041 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8042 x = build_real (type,
8043 real_value_truncate (TYPE_MODE (type), dconste));
8044 exponent = CALL_EXPR_ARG (arg, 0);
8045 break;
8046 CASE_FLT_FN (BUILT_IN_EXP2):
8047 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8048 x = build_real (type, dconst2);
8049 exponent = CALL_EXPR_ARG (arg, 0);
8050 break;
8051 CASE_FLT_FN (BUILT_IN_EXP10):
8052 CASE_FLT_FN (BUILT_IN_POW10):
8053 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8054 x = build_real (type, dconst10);
8055 exponent = CALL_EXPR_ARG (arg, 0);
8056 break;
8057 CASE_FLT_FN (BUILT_IN_SQRT):
8058 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8059 x = CALL_EXPR_ARG (arg, 0);
8060 exponent = build_real (type, dconsthalf);
8061 break;
8062 CASE_FLT_FN (BUILT_IN_CBRT):
8063 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8064 x = CALL_EXPR_ARG (arg, 0);
8065 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8066 dconstthird));
8067 break;
8068 CASE_FLT_FN (BUILT_IN_POW):
8069 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8070 x = CALL_EXPR_ARG (arg, 0);
8071 exponent = CALL_EXPR_ARG (arg, 1);
8072 break;
8073 default:
8074 break;
8077 /* Now perform the optimization. */
8078 if (x && exponent)
8080 tree logfn = build_call_expr (fndecl, 1, x);
8081 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8086 return NULL_TREE;
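/* A minimal standalone sketch (helper name made up) of the exponent
   extraction above: with unsafe math optimizations, log (pow (x, y))
   is rewritten as y * log (x); log (sqrt (x)) likewise becomes
   0.5 * log (x).  */
#include <math.h>
double
log_of_pow (double x, double y)
{
  return y * log (x);
}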
8089 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8090 NULL_TREE if no simplification can be made. */
8092 static tree
8093 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8095 tree res, narg0, narg1;
8097 if (!validate_arg (arg0, REAL_TYPE)
8098 || !validate_arg (arg1, REAL_TYPE))
8099 return NULL_TREE;
8101 /* Calculate the result when the argument is a constant. */
8102 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8103 return res;
8105 /* If either argument to hypot has a negate or abs, strip that off.
8106 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8107 narg0 = fold_strip_sign_ops (arg0);
8108 narg1 = fold_strip_sign_ops (arg1);
8109 if (narg0 || narg1)
8111 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8112 narg1 ? narg1 : arg1);
8115 /* If either argument is zero, hypot is fabs of the other. */
8116 if (real_zerop (arg0))
8117 return fold_build1 (ABS_EXPR, type, arg1);
8118 else if (real_zerop (arg1))
8119 return fold_build1 (ABS_EXPR, type, arg0);
8121 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8122 if (flag_unsafe_math_optimizations
8123 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8125 const REAL_VALUE_TYPE sqrt2_trunc
8126 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
8127 return fold_build2 (MULT_EXPR, type,
8128 fold_build1 (ABS_EXPR, type, arg0),
8129 build_real (type, sqrt2_trunc));
8132 return NULL_TREE;
8136 /* Fold a builtin function call to pow, powf, or powl. Return
8137 NULL_TREE if no simplification can be made. */
8138 static tree
8139 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8141 tree res;
8143 if (!validate_arg (arg0, REAL_TYPE)
8144 || !validate_arg (arg1, REAL_TYPE))
8145 return NULL_TREE;
8147 /* Calculate the result when the argument is a constant. */
8148 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8149 return res;
8151 /* Optimize pow(1.0,y) = 1.0. */
8152 if (real_onep (arg0))
8153 return omit_one_operand (type, build_real (type, dconst1), arg1);
8155 if (TREE_CODE (arg1) == REAL_CST
8156 && !TREE_OVERFLOW (arg1))
8158 REAL_VALUE_TYPE cint;
8159 REAL_VALUE_TYPE c;
8160 HOST_WIDE_INT n;
8162 c = TREE_REAL_CST (arg1);
8164 /* Optimize pow(x,0.0) = 1.0. */
8165 if (REAL_VALUES_EQUAL (c, dconst0))
8166 return omit_one_operand (type, build_real (type, dconst1),
8167 arg0);
8169 /* Optimize pow(x,1.0) = x. */
8170 if (REAL_VALUES_EQUAL (c, dconst1))
8171 return arg0;
8173 /* Optimize pow(x,-1.0) = 1.0/x. */
8174 if (REAL_VALUES_EQUAL (c, dconstm1))
8175 return fold_build2 (RDIV_EXPR, type,
8176 build_real (type, dconst1), arg0);
8178 /* Optimize pow(x,0.5) = sqrt(x). */
8179 if (flag_unsafe_math_optimizations
8180 && REAL_VALUES_EQUAL (c, dconsthalf))
8182 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8184 if (sqrtfn != NULL_TREE)
8185 return build_call_expr (sqrtfn, 1, arg0);
8188 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8189 if (flag_unsafe_math_optimizations)
8191 const REAL_VALUE_TYPE dconstroot
8192 = real_value_truncate (TYPE_MODE (type), dconstthird);
8194 if (REAL_VALUES_EQUAL (c, dconstroot))
8196 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8197 if (cbrtfn != NULL_TREE)
8198 return build_call_expr (cbrtfn, 1, arg0);
8202 /* Check for an integer exponent. */
8203 n = real_to_integer (&c);
8204 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8205 if (real_identical (&c, &cint))
8207 /* Attempt to evaluate pow at compile-time. */
8208 if (TREE_CODE (arg0) == REAL_CST
8209 && !TREE_OVERFLOW (arg0))
8211 REAL_VALUE_TYPE x;
8212 bool inexact;
8214 x = TREE_REAL_CST (arg0);
8215 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8216 if (flag_unsafe_math_optimizations || !inexact)
8217 return build_real (type, x);
8220 /* Strip sign ops from even integer powers. */
8221 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8223 tree narg0 = fold_strip_sign_ops (arg0);
8224 if (narg0)
8225 return build_call_expr (fndecl, 2, narg0, arg1);
8230 if (flag_unsafe_math_optimizations)
8232 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8234 /* Optimize pow(expN(x),y) = expN(x*y). */
8235 if (BUILTIN_EXPONENT_P (fcode))
8237 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8238 tree arg = CALL_EXPR_ARG (arg0, 0);
8239 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8240 return build_call_expr (expfn, 1, arg);
8243 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8244 if (BUILTIN_SQRT_P (fcode))
8246 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8247 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8248 build_real (type, dconsthalf));
8249 return build_call_expr (fndecl, 2, narg0, narg1);
8252 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8253 if (BUILTIN_CBRT_P (fcode))
8255 tree arg = CALL_EXPR_ARG (arg0, 0);
8256 if (tree_expr_nonnegative_p (arg))
8258 const REAL_VALUE_TYPE dconstroot
8259 = real_value_truncate (TYPE_MODE (type), dconstthird);
8260 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8261 build_real (type, dconstroot));
8262 return build_call_expr (fndecl, 2, arg, narg1);
8266 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8267 if (fcode == BUILT_IN_POW
8268 || fcode == BUILT_IN_POWF
8269 || fcode == BUILT_IN_POWL)
8271 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8272 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8273 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8274 return build_call_expr (fndecl, 2, arg00, narg1);
8278 return NULL_TREE;
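/* A minimal standalone sketch (helper name made up) of the last case
   above: with unsafe math optimizations, pow (pow (x, y), z) folds to
   pow (x, y * z), and pow (sqrt (x), y) similarly to
   pow (x, y * 0.5).  */
#include <math.h>
double
nested_pow (double x, double y, double z)
{
  return pow (x, y * z);
}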
8281 /* Fold a builtin function call to powi, powif, or powil with arguments ARG0 and ARG1.
8282 Return NULL_TREE if no simplification can be made. */
8283 static tree
8284 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8285 tree arg0, tree arg1, tree type)
8287 if (!validate_arg (arg0, REAL_TYPE)
8288 || !validate_arg (arg1, INTEGER_TYPE))
8289 return NULL_TREE;
8291 /* Optimize pow(1.0,y) = 1.0. */
8292 if (real_onep (arg0))
8293 return omit_one_operand (type, build_real (type, dconst1), arg1);
8295 if (host_integerp (arg1, 0))
8297 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8299 /* Evaluate powi at compile-time. */
8300 if (TREE_CODE (arg0) == REAL_CST
8301 && !TREE_OVERFLOW (arg0))
8303 REAL_VALUE_TYPE x;
8304 x = TREE_REAL_CST (arg0);
8305 real_powi (&x, TYPE_MODE (type), &x, c);
8306 return build_real (type, x);
8309 /* Optimize pow(x,0) = 1.0. */
8310 if (c == 0)
8311 return omit_one_operand (type, build_real (type, dconst1),
8312 arg0);
8314 /* Optimize pow(x,1) = x. */
8315 if (c == 1)
8316 return arg0;
8318 /* Optimize pow(x,-1) = 1.0/x. */
8319 if (c == -1)
8320 return fold_build2 (RDIV_EXPR, type,
8321 build_real (type, dconst1), arg0);
8324 return NULL_TREE;
8327 /* A subroutine of fold_builtin to fold the various exponent
8328 functions. Return NULL_TREE if no simplification can be made.
8329 FUNC is the corresponding MPFR exponent function. */
8331 static tree
8332 fold_builtin_exponent (tree fndecl, tree arg,
8333 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8335 if (validate_arg (arg, REAL_TYPE))
8337 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8338 tree res;
8340 /* Calculate the result when the argument is a constant. */
8341 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8342 return res;
8344 /* Optimize expN(logN(x)) = x. */
8345 if (flag_unsafe_math_optimizations)
8347 const enum built_in_function fcode = builtin_mathfn_code (arg);
8349 if ((func == mpfr_exp
8350 && (fcode == BUILT_IN_LOG
8351 || fcode == BUILT_IN_LOGF
8352 || fcode == BUILT_IN_LOGL))
8353 || (func == mpfr_exp2
8354 && (fcode == BUILT_IN_LOG2
8355 || fcode == BUILT_IN_LOG2F
8356 || fcode == BUILT_IN_LOG2L))
8357 || (func == mpfr_exp10
8358 && (fcode == BUILT_IN_LOG10
8359 || fcode == BUILT_IN_LOG10F
8360 || fcode == BUILT_IN_LOG10L)))
8361 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8365 return NULL_TREE;
8368 /* Return true if VAR is a VAR_DECL or a component thereof. */
8370 static bool
8371 var_decl_component_p (tree var)
8373 tree inner = var;
8374 while (handled_component_p (inner))
8375 inner = TREE_OPERAND (inner, 0);
8376 return SSA_VAR_P (inner);
8379 /* Fold function call to builtin memset. Return
8380 NULL_TREE if no simplification can be made. */
8382 static tree
8383 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8385 tree var, ret;
8386 unsigned HOST_WIDE_INT length, cval;
8388 if (! validate_arg (dest, POINTER_TYPE)
8389 || ! validate_arg (c, INTEGER_TYPE)
8390 || ! validate_arg (len, INTEGER_TYPE))
8391 return NULL_TREE;
8393 if (! host_integerp (len, 1))
8394 return NULL_TREE;
8396 /* If the LEN parameter is zero, return DEST. */
8397 if (integer_zerop (len))
8398 return omit_one_operand (type, dest, c);
8400 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8401 return NULL_TREE;
8403 var = dest;
8404 STRIP_NOPS (var);
8405 if (TREE_CODE (var) != ADDR_EXPR)
8406 return NULL_TREE;
8408 var = TREE_OPERAND (var, 0);
8409 if (TREE_THIS_VOLATILE (var))
8410 return NULL_TREE;
8412 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8413 && !POINTER_TYPE_P (TREE_TYPE (var)))
8414 return NULL_TREE;
8416 if (! var_decl_component_p (var))
8417 return NULL_TREE;
8419 length = tree_low_cst (len, 1);
8420 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8421 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8422 < (int) length)
8423 return NULL_TREE;
8425 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8426 return NULL_TREE;
8428 if (integer_zerop (c))
8429 cval = 0;
8430 else
8432 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8433 return NULL_TREE;
8435 cval = tree_low_cst (c, 1);
8436 cval &= 0xff;
8437 cval |= cval << 8;
8438 cval |= cval << 16;
8439 cval |= (cval << 31) << 1;
8442 ret = build_int_cst_type (TREE_TYPE (var), cval);
8443 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8444 if (ignore)
8445 return ret;
8447 return omit_one_operand (type, dest, ret);
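/* A minimal standalone sketch (helper name made up) of the byte
   replication above: the low byte of C is copied into every byte of the
   word that replaces the memset.  The (cval << 31) << 1 step is
   presumably written that way so the 32-bit shift stays well defined
   when the host wide integer is only 32 bits wide.  */
#include <stdint.h>
static uint64_t
replicate_byte (unsigned char c)
{
  uint64_t cval = c;
  cval &= 0xff;
  cval |= cval << 8;
  cval |= cval << 16;
  cval |= (cval << 31) << 1;
  return cval;
}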
8450 /* Fold function call to builtin bzero. Return
8451 NULL_TREE if no simplification can be made. */
8453 static tree
8454 fold_builtin_bzero (tree dest, tree size, bool ignore)
8456 if (! validate_arg (dest, POINTER_TYPE)
8457 || ! validate_arg (size, INTEGER_TYPE))
8458 return NULL_TREE;
8460 if (!ignore)
8461 return NULL_TREE;
8463 /* New argument list transforming bzero(ptr x, int y) to
8464 memset(ptr x, int 0, size_t y). This is done this way
8465 so that if it isn't expanded inline, we fall back to
8466 calling bzero instead of memset. */
8468 return fold_builtin_memset (dest, integer_zero_node,
8469 fold_convert (sizetype, size),
8470 void_type_node, ignore);
8473 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8474 NULL_TREE if no simplification can be made.
8475 If ENDP is 0, return DEST (like memcpy).
8476 If ENDP is 1, return DEST+LEN (like mempcpy).
8477 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8478 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8479 (memmove). */
8481 static tree
8482 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8484 tree destvar, srcvar, expr;
8486 if (! validate_arg (dest, POINTER_TYPE)
8487 || ! validate_arg (src, POINTER_TYPE)
8488 || ! validate_arg (len, INTEGER_TYPE))
8489 return NULL_TREE;
8491 /* If the LEN parameter is zero, return DEST. */
8492 if (integer_zerop (len))
8493 return omit_one_operand (type, dest, src);
8495 /* If SRC and DEST are the same (and not volatile), return
8496 DEST{,+LEN,+LEN-1}. */
8497 if (operand_equal_p (src, dest, 0))
8498 expr = len;
8499 else
8501 tree srctype, desttype;
8502 if (endp == 3)
8504 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8505 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8507 /* Both DEST and SRC must be pointer types.
8508 ??? This is what old code did. Is the testing for pointer types
8509 really mandatory?
8511 If either SRC is readonly or length is 1, we can use memcpy. */
8512 if (dest_align && src_align
8513 && (readonly_data_expr (src)
8514 || (host_integerp (len, 1)
8515 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8516 tree_low_cst (len, 1)))))
8518 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8519 if (!fn)
8520 return NULL_TREE;
8521 return build_call_expr (fn, 3, dest, src, len);
8523 return NULL_TREE;
8526 if (!host_integerp (len, 0))
8527 return NULL_TREE;
8528 /* FIXME:
8529 This logic loses for arguments like (type *)malloc (sizeof (type)),
8530 since we strip casts up to the VOID return value from malloc.
8531 Perhaps we ought to inherit the type from a non-VOID argument here? */
8532 STRIP_NOPS (src);
8533 STRIP_NOPS (dest);
8534 srctype = TREE_TYPE (TREE_TYPE (src));
8535 desttype = TREE_TYPE (TREE_TYPE (dest));
8536 if (!srctype || !desttype
8537 || !TYPE_SIZE_UNIT (srctype)
8538 || !TYPE_SIZE_UNIT (desttype)
8539 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8540 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8541 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8542 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8543 return NULL_TREE;
8545 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8546 < (int) TYPE_ALIGN (desttype)
8547 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8548 < (int) TYPE_ALIGN (srctype)))
8549 return NULL_TREE;
8551 if (!ignore)
8552 dest = builtin_save_expr (dest);
8554 srcvar = build_fold_indirect_ref (src);
8555 if (TREE_THIS_VOLATILE (srcvar))
8556 return NULL_TREE;
8557 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8558 return NULL_TREE;
8559 /* With memcpy, it is possible to bypass aliasing rules, so without
8560 this check, e.g. execute/20060930-2.c would be misoptimized, because
8561 it uses a conflicting alias set to hold the argument for the memcpy call.
8562 This check is probably unnecessary with -fno-strict-aliasing.
8563 Similarly for destvar. See also PR29286. */
8564 if (!var_decl_component_p (srcvar)
8565 /* Accept: memcpy (*char_var, "test", 1); which simplifies
8566 to char_var='t'; */
8567 || is_gimple_min_invariant (srcvar)
8568 || readonly_data_expr (src))
8569 return NULL_TREE;
8571 destvar = build_fold_indirect_ref (dest);
8572 if (TREE_THIS_VOLATILE (destvar))
8573 return NULL_TREE;
8574 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8575 return NULL_TREE;
8576 if (!var_decl_component_p (destvar))
8577 return NULL_TREE;
8579 if (srctype == desttype
8580 || (gimple_in_ssa_p (cfun)
8581 && useless_type_conversion_p (desttype, srctype)))
8582 expr = srcvar;
8583 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8584 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8585 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8586 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8587 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8588 else
8589 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8590 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8593 if (ignore)
8594 return expr;
8596 if (endp == 0 || endp == 3)
8597 return omit_one_operand (type, dest, expr);
8599 if (expr == len)
8600 expr = NULL_TREE;
8602 if (endp == 2)
8603 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8604 ssize_int (1));
8606 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8607 dest = fold_convert (type, dest);
8608 if (expr)
8609 dest = omit_one_operand (type, dest, expr);
8610 return dest;
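/* A minimal standalone sketch (helper name made up) of the ENDP
   handling above: once the copy itself has been folded away, the value
   of the original call is rebuilt from DEST and LEN.  */
#include <stddef.h>
static void *
memop_return_value (char *dest, size_t len, int endp)
{
  if (endp == 0 || endp == 3)   /* memcpy / memmove return DEST.  */
    return dest;
  if (endp == 2)                /* stpcpy-style: DEST + LEN - 1.  */
    len -= 1;
  return dest + len;            /* mempcpy: DEST + LEN.  */
}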
8613 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8614 If LEN is not NULL, it represents the length of the string to be
8615 copied. Return NULL_TREE if no simplification can be made. */
8617 tree
8618 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8620 tree fn;
8622 if (!validate_arg (dest, POINTER_TYPE)
8623 || !validate_arg (src, POINTER_TYPE))
8624 return NULL_TREE;
8626 /* If SRC and DEST are the same (and not volatile), return DEST. */
8627 if (operand_equal_p (src, dest, 0))
8628 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8630 if (optimize_size)
8631 return NULL_TREE;
8633 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8634 if (!fn)
8635 return NULL_TREE;
8637 if (!len)
8639 len = c_strlen (src, 1);
8640 if (! len || TREE_SIDE_EFFECTS (len))
8641 return NULL_TREE;
8644 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8645 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8646 build_call_expr (fn, 3, dest, src, len));
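/* A minimal standalone sketch (helper name made up) of the rewrite
   above: when not optimizing for size and the source length is known,
   strcpy (d, s) becomes memcpy (d, s, strlen (s) + 1), the +1 covering
   the terminating NUL.  */
#include <string.h>
char *
strcpy_as_memcpy (char *d, const char *s)
{
  return memcpy (d, s, strlen (s) + 1);
}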
8649 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8650 If SLEN is not NULL, it represents the length of the source string.
8651 Return NULL_TREE if no simplification can be made. */
8653 tree
8654 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8656 tree fn;
8658 if (!validate_arg (dest, POINTER_TYPE)
8659 || !validate_arg (src, POINTER_TYPE)
8660 || !validate_arg (len, INTEGER_TYPE))
8661 return NULL_TREE;
8663 /* If the LEN parameter is zero, return DEST. */
8664 if (integer_zerop (len))
8665 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8667 /* We can't compare slen with len as constants below if len is not a
8668 constant. */
8669 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8670 return NULL_TREE;
8672 if (!slen)
8673 slen = c_strlen (src, 1);
8675 /* Now, we must be passed a constant src ptr parameter. */
8676 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8677 return NULL_TREE;
8679 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8681 /* We do not support simplification of this case, though we do
8682 support it when expanding trees into RTL. */
8683 /* FIXME: generate a call to __builtin_memset. */
8684 if (tree_int_cst_lt (slen, len))
8685 return NULL_TREE;
8687 /* OK transform into builtin memcpy. */
8688 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8689 if (!fn)
8690 return NULL_TREE;
8691 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8692 build_call_expr (fn, 3, dest, src, len));
8695 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8696 arguments to the call, and TYPE is its return type.
8697 Return NULL_TREE if no simplification can be made. */
8699 static tree
8700 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8702 if (!validate_arg (arg1, POINTER_TYPE)
8703 || !validate_arg (arg2, INTEGER_TYPE)
8704 || !validate_arg (len, INTEGER_TYPE))
8705 return NULL_TREE;
8706 else
8708 const char *p1;
8710 if (TREE_CODE (arg2) != INTEGER_CST
8711 || !host_integerp (len, 1))
8712 return NULL_TREE;
8714 p1 = c_getstr (arg1);
8715 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8717 char c;
8718 const char *r;
8719 tree tem;
8721 if (target_char_cast (arg2, &c))
8722 return NULL_TREE;
8724 r = memchr (p1, c, tree_low_cst (len, 1));
8726 if (r == NULL)
8727 return build_int_cst (TREE_TYPE (arg1), 0);
8729 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8730 size_int (r - p1));
8731 return fold_convert (type, tem);
8733 return NULL_TREE;
8737 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8738 Return NULL_TREE if no simplification can be made. */
8740 static tree
8741 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8743 const char *p1, *p2;
8745 if (!validate_arg (arg1, POINTER_TYPE)
8746 || !validate_arg (arg2, POINTER_TYPE)
8747 || !validate_arg (len, INTEGER_TYPE))
8748 return NULL_TREE;
8750 /* If the LEN parameter is zero, return zero. */
8751 if (integer_zerop (len))
8752 return omit_two_operands (integer_type_node, integer_zero_node,
8753 arg1, arg2);
8755 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8756 if (operand_equal_p (arg1, arg2, 0))
8757 return omit_one_operand (integer_type_node, integer_zero_node, len);
8759 p1 = c_getstr (arg1);
8760 p2 = c_getstr (arg2);
8762 /* If all arguments are constant, and the value of len is not greater
8763 than the lengths of arg1 and arg2, evaluate at compile-time. */
8764 if (host_integerp (len, 1) && p1 && p2
8765 && compare_tree_int (len, strlen (p1) + 1) <= 0
8766 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8768 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8770 if (r > 0)
8771 return integer_one_node;
8772 else if (r < 0)
8773 return integer_minus_one_node;
8774 else
8775 return integer_zero_node;
8778 /* If the len parameter is one, return an expression corresponding to
8779 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8780 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8782 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8783 tree cst_uchar_ptr_node
8784 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8786 tree ind1 = fold_convert (integer_type_node,
8787 build1 (INDIRECT_REF, cst_uchar_node,
8788 fold_convert (cst_uchar_ptr_node,
8789 arg1)));
8790 tree ind2 = fold_convert (integer_type_node,
8791 build1 (INDIRECT_REF, cst_uchar_node,
8792 fold_convert (cst_uchar_ptr_node,
8793 arg2)));
8794 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8797 return NULL_TREE;
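/* A minimal standalone sketch (helper name made up) of the final case
   above: with a length of one, memcmp reduces to subtracting the two
   bytes read as unsigned char.  */
static int
memcmp_len1 (const void *a, const void *b)
{
  return *(const unsigned char *) a - *(const unsigned char *) b;
}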
8800 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8801 Return NULL_TREE if no simplification can be made. */
8803 static tree
8804 fold_builtin_strcmp (tree arg1, tree arg2)
8806 const char *p1, *p2;
8808 if (!validate_arg (arg1, POINTER_TYPE)
8809 || !validate_arg (arg2, POINTER_TYPE))
8810 return NULL_TREE;
8812 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8813 if (operand_equal_p (arg1, arg2, 0))
8814 return integer_zero_node;
8816 p1 = c_getstr (arg1);
8817 p2 = c_getstr (arg2);
8819 if (p1 && p2)
8821 const int i = strcmp (p1, p2);
8822 if (i < 0)
8823 return integer_minus_one_node;
8824 else if (i > 0)
8825 return integer_one_node;
8826 else
8827 return integer_zero_node;
8830 /* If the second arg is "", return *(const unsigned char*)arg1. */
8831 if (p2 && *p2 == '\0')
8833 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8834 tree cst_uchar_ptr_node
8835 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8837 return fold_convert (integer_type_node,
8838 build1 (INDIRECT_REF, cst_uchar_node,
8839 fold_convert (cst_uchar_ptr_node,
8840 arg1)));
8843 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8844 if (p1 && *p1 == '\0')
8846 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8847 tree cst_uchar_ptr_node
8848 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8850 tree temp = fold_convert (integer_type_node,
8851 build1 (INDIRECT_REF, cst_uchar_node,
8852 fold_convert (cst_uchar_ptr_node,
8853 arg2)));
8854 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8857 return NULL_TREE;
8860 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8861 Return NULL_TREE if no simplification can be made. */
8863 static tree
8864 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
8866 const char *p1, *p2;
8868 if (!validate_arg (arg1, POINTER_TYPE)
8869 || !validate_arg (arg2, POINTER_TYPE)
8870 || !validate_arg (len, INTEGER_TYPE))
8871 return NULL_TREE;
8873 /* If the LEN parameter is zero, return zero. */
8874 if (integer_zerop (len))
8875 return omit_two_operands (integer_type_node, integer_zero_node,
8876 arg1, arg2);
8878 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8879 if (operand_equal_p (arg1, arg2, 0))
8880 return omit_one_operand (integer_type_node, integer_zero_node, len);
8882 p1 = c_getstr (arg1);
8883 p2 = c_getstr (arg2);
8885 if (host_integerp (len, 1) && p1 && p2)
8887 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8888 if (i > 0)
8889 return integer_one_node;
8890 else if (i < 0)
8891 return integer_minus_one_node;
8892 else
8893 return integer_zero_node;
8896 /* If the second arg is "", and the length is greater than zero,
8897 return *(const unsigned char*)arg1. */
8898 if (p2 && *p2 == '\0'
8899 && TREE_CODE (len) == INTEGER_CST
8900 && tree_int_cst_sgn (len) == 1)
8902 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8903 tree cst_uchar_ptr_node
8904 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8906 return fold_convert (integer_type_node,
8907 build1 (INDIRECT_REF, cst_uchar_node,
8908 fold_convert (cst_uchar_ptr_node,
8909 arg1)));
8912 /* If the first arg is "", and the length is greater than zero,
8913 return -*(const unsigned char*)arg2. */
8914 if (p1 && *p1 == '\0'
8915 && TREE_CODE (len) == INTEGER_CST
8916 && tree_int_cst_sgn (len) == 1)
8918 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8919 tree cst_uchar_ptr_node
8920 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8922 tree temp = fold_convert (integer_type_node,
8923 build1 (INDIRECT_REF, cst_uchar_node,
8924 fold_convert (cst_uchar_ptr_node,
8925 arg2)));
8926 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8929 /* If the len parameter is one, return an expression corresponding to
8930 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8931 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8933 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8934 tree cst_uchar_ptr_node
8935 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8937 tree ind1 = fold_convert (integer_type_node,
8938 build1 (INDIRECT_REF, cst_uchar_node,
8939 fold_convert (cst_uchar_ptr_node,
8940 arg1)));
8941 tree ind2 = fold_convert (integer_type_node,
8942 build1 (INDIRECT_REF, cst_uchar_node,
8943 fold_convert (cst_uchar_ptr_node,
8944 arg2)));
8945 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8948 return NULL_TREE;
8951 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8952 ARG. Return NULL_TREE if no simplification can be made. */
8954 static tree
8955 fold_builtin_signbit (tree arg, tree type)
8957 tree temp;
8959 if (!validate_arg (arg, REAL_TYPE))
8960 return NULL_TREE;
8962 /* If ARG is a compile-time constant, determine the result. */
8963 if (TREE_CODE (arg) == REAL_CST
8964 && !TREE_OVERFLOW (arg))
8966 REAL_VALUE_TYPE c;
8968 c = TREE_REAL_CST (arg);
8969 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8970 return fold_convert (type, temp);
8973 /* If ARG is non-negative, the result is always zero. */
8974 if (tree_expr_nonnegative_p (arg))
8975 return omit_one_operand (type, integer_zero_node, arg);
8977 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8978 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8979 return fold_build2 (LT_EXPR, type, arg,
8980 build_real (TREE_TYPE (arg), dconst0));
8982 return NULL_TREE;
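/* A minimal standalone sketch (helper name made up) of the last fold
   above: when the sign of zero is not honored for the argument's mode,
   signbit (x) reduces to the comparison x < 0.0.  */
static int
signbit_no_signed_zeros (double x)
{
  return x < 0.0;
}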
8985 /* Fold function call to builtin copysign, copysignf or copysignl with
8986 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8987 be made. */
8989 static tree
8990 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
8992 tree tem;
8994 if (!validate_arg (arg1, REAL_TYPE)
8995 || !validate_arg (arg2, REAL_TYPE))
8996 return NULL_TREE;
8998 /* copysign(X,X) is X. */
8999 if (operand_equal_p (arg1, arg2, 0))
9000 return fold_convert (type, arg1);
9002 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9003 if (TREE_CODE (arg1) == REAL_CST
9004 && TREE_CODE (arg2) == REAL_CST
9005 && !TREE_OVERFLOW (arg1)
9006 && !TREE_OVERFLOW (arg2))
9008 REAL_VALUE_TYPE c1, c2;
9010 c1 = TREE_REAL_CST (arg1);
9011 c2 = TREE_REAL_CST (arg2);
9012 /* c1.sign := c2.sign. */
9013 real_copysign (&c1, &c2);
9014 return build_real (type, c1);
9017 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9018 Remember to evaluate Y for side-effects. */
9019 if (tree_expr_nonnegative_p (arg2))
9020 return omit_one_operand (type,
9021 fold_build1 (ABS_EXPR, type, arg1),
9022 arg2);
9024 /* Strip sign changing operations for the first argument. */
9025 tem = fold_strip_sign_ops (arg1);
9026 if (tem)
9027 return build_call_expr (fndecl, 2, tem, arg2);
9029 return NULL_TREE;
9032 /* Fold a call to builtin isascii with argument ARG. */
9034 static tree
9035 fold_builtin_isascii (tree arg)
9037 if (!validate_arg (arg, INTEGER_TYPE))
9038 return NULL_TREE;
9039 else
9041 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9042 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9043 build_int_cst (NULL_TREE,
9044 ~ (unsigned HOST_WIDE_INT) 0x7f));
9045 return fold_build2 (EQ_EXPR, integer_type_node,
9046 arg, integer_zero_node);
9050 /* Fold a call to builtin toascii with argument ARG. */
9052 static tree
9053 fold_builtin_toascii (tree arg)
9055 if (!validate_arg (arg, INTEGER_TYPE))
9056 return NULL_TREE;
9058 /* Transform toascii(c) -> (c & 0x7f). */
9059 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9060 build_int_cst (NULL_TREE, 0x7f));
9063 /* Fold a call to builtin isdigit with argument ARG. */
9065 static tree
9066 fold_builtin_isdigit (tree arg)
9068 if (!validate_arg (arg, INTEGER_TYPE))
9069 return NULL_TREE;
9070 else
9072 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9073 /* According to the C standard, isdigit is unaffected by locale.
9074 However, it definitely is affected by the target character set. */
9075 unsigned HOST_WIDE_INT target_digit0
9076 = lang_hooks.to_target_charset ('0');
9078 if (target_digit0 == 0)
9079 return NULL_TREE;
9081 arg = fold_convert (unsigned_type_node, arg);
9082 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9083 build_int_cst (unsigned_type_node, target_digit0));
9084 return fold_build2 (LE_EXPR, integer_type_node, arg,
9085 build_int_cst (unsigned_type_node, 9));
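/* A minimal standalone sketch (helper name made up) of the fold above:
   the subtraction and unsigned comparison implement the range check
   '0' <= c <= '9' with no table or locale lookup.  The compiler uses
   the target character set's '0'; the sketch uses the plain host
   character constant.  */
static int
isdigit_folded (int c)
{
  return (unsigned) c - '0' <= 9;
}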
9089 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9091 static tree
9092 fold_builtin_fabs (tree arg, tree type)
9094 if (!validate_arg (arg, REAL_TYPE))
9095 return NULL_TREE;
9097 arg = fold_convert (type, arg);
9098 if (TREE_CODE (arg) == REAL_CST)
9099 return fold_abs_const (arg, type);
9100 return fold_build1 (ABS_EXPR, type, arg);
9103 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9105 static tree
9106 fold_builtin_abs (tree arg, tree type)
9108 if (!validate_arg (arg, INTEGER_TYPE))
9109 return NULL_TREE;
9111 arg = fold_convert (type, arg);
9112 if (TREE_CODE (arg) == INTEGER_CST)
9113 return fold_abs_const (arg, type);
9114 return fold_build1 (ABS_EXPR, type, arg);
9117 /* Fold a call to builtin fmin or fmax. */
9119 static tree
9120 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9122 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9124 /* Calculate the result when the argument is a constant. */
9125 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9127 if (res)
9128 return res;
9130 /* If either argument is NaN, return the other one. Avoid the
9131 transformation if we get (and honor) a signalling NaN. Using
9132 omit_one_operand() ensures we create a non-lvalue. */
9133 if (TREE_CODE (arg0) == REAL_CST
9134 && real_isnan (&TREE_REAL_CST (arg0))
9135 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9136 || ! TREE_REAL_CST (arg0).signalling))
9137 return omit_one_operand (type, arg1, arg0);
9138 if (TREE_CODE (arg1) == REAL_CST
9139 && real_isnan (&TREE_REAL_CST (arg1))
9140 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9141 || ! TREE_REAL_CST (arg1).signalling))
9142 return omit_one_operand (type, arg0, arg1);
9144 /* Transform fmin/fmax(x,x) -> x. */
9145 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9146 return omit_one_operand (type, arg0, arg1);
9148 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9149 functions to return the numeric arg if the other one is NaN.
9150 These tree codes don't honor that, so only transform if
9151 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9152 handled, so we don't have to worry about it either. */
9153 if (flag_finite_math_only)
9154 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9155 fold_convert (type, arg0),
9156 fold_convert (type, arg1));
9158 return NULL_TREE;
9161 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9163 static tree
9164 fold_builtin_carg (tree arg, tree type)
9166 if (validate_arg (arg, COMPLEX_TYPE))
9168 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9170 if (atan2_fn)
9172 tree new_arg = builtin_save_expr (arg);
9173 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9174 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9175 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9179 return NULL_TREE;
9182 /* Fold a call to builtin logb/ilogb. */
9184 static tree
9185 fold_builtin_logb (tree arg, tree rettype)
9187 if (! validate_arg (arg, REAL_TYPE))
9188 return NULL_TREE;
9190 STRIP_NOPS (arg);
9192 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9194 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9196 switch (value->cl)
9198 case rvc_nan:
9199 case rvc_inf:
9200 /* If arg is Inf or NaN and we're logb, return it. */
9201 if (TREE_CODE (rettype) == REAL_TYPE)
9202 return fold_convert (rettype, arg);
9203 /* Fall through... */
9204 case rvc_zero:
9205 /* Zero may set errno and/or raise an exception for logb; also,
9206 for ilogb we don't know FP_ILOGB0. */
9207 return NULL_TREE;
9208 case rvc_normal:
9209 /* For normal numbers, proceed iff radix == 2. In GCC,
9210 normalized significands are in the range [0.5, 1.0). We
9211 want the exponent as if they were [1.0, 2.0) so get the
9212 exponent and subtract 1. */
9213 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9214 return fold_convert (rettype, build_int_cst (NULL_TREE,
9215 REAL_EXP (value)-1));
9216 break;
9220 return NULL_TREE;
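/* Illustrative sketch (hypothetical constants, not from this file) of the
   folds performed above when the radix is 2:

     logb (8.0)    ->  3.0          ilogb (8.0)  ->  3
     logb (0.75)   ->  -1.0

   Inf and NaN arguments fold to themselves for logb only; zero is left
   alone because of the errno/FP_ILOGB0 concerns noted above.  */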
9223 /* Fold a call to builtin significand, if radix == 2. */
9225 static tree
9226 fold_builtin_significand (tree arg, tree rettype)
9228 if (! validate_arg (arg, REAL_TYPE))
9229 return NULL_TREE;
9231 STRIP_NOPS (arg);
9233 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9235 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9237 switch (value->cl)
9239 case rvc_zero:
9240 case rvc_nan:
9241 case rvc_inf:
9242 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9243 return fold_convert (rettype, arg);
9244 case rvc_normal:
9245 /* For normal numbers, proceed iff radix == 2. */
9246 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9248 REAL_VALUE_TYPE result = *value;
9249 /* In GCC, normalized significands are in the range [0.5,
9250 1.0). We want them to be [1.0, 2.0) so set the
9251 exponent to 1. */
9252 SET_REAL_EXP (&result, 1);
9253 return build_real (rettype, result);
9255 break;
9259 return NULL_TREE;
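/* Illustrative sketch (hypothetical constants, not from this file):

     significand (24.0)  ->  1.5        (24.0 == 1.5 * 2**4)
     significand (0.0)   ->  0.0

   i.e. a constant argument folds to its significand scaled into
   [1.0, 2.0), while +-0, +-Inf and NaN fold to themselves.  */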
9262 /* Fold a call to builtin frexp; we can assume the base is 2. */
9264 static tree
9265 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9267 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9268 return NULL_TREE;
9270 STRIP_NOPS (arg0);
9272 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9273 return NULL_TREE;
9275 arg1 = build_fold_indirect_ref (arg1);
9277 /* Proceed if a valid pointer type was passed in. */
9278 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9280 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9281 tree frac, exp;
9283 switch (value->cl)
9285 case rvc_zero:
9286 /* For +-0, return (*exp = 0, +-0). */
9287 exp = integer_zero_node;
9288 frac = arg0;
9289 break;
9290 case rvc_nan:
9291 case rvc_inf:
9292 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9293 return omit_one_operand (rettype, arg0, arg1);
9294 case rvc_normal:
9296 /* Since the frexp function always expects base 2, and in
9297 GCC normalized significands are already in the range
9298 [0.5, 1.0), we have exactly what frexp wants. */
9299 REAL_VALUE_TYPE frac_rvt = *value;
9300 SET_REAL_EXP (&frac_rvt, 0);
9301 frac = build_real (rettype, frac_rvt);
9302 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9304 break;
9305 default:
9306 gcc_unreachable ();
9309 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9310 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9311 TREE_SIDE_EFFECTS (arg1) = 1;
9312 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9315 return NULL_TREE;
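/* Illustrative sketch (hypothetical call, not from this file): for a
   constant argument,

     int e;
     __builtin_frexp (8.0, &e)   ->   (e = 4, 0.5)

   i.e. a COMPOUND_EXPR that stores the exponent through the pointer and
   yields the significand in [0.5, 1.0).  */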
9318 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9319 then we can assume the base is two. If it's false, then we have to
9320 check the mode of the TYPE parameter in certain cases. */
9322 static tree
9323 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9325 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9327 STRIP_NOPS (arg0);
9328 STRIP_NOPS (arg1);
9330 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9331 if (real_zerop (arg0) || integer_zerop (arg1)
9332 || (TREE_CODE (arg0) == REAL_CST
9333 && !real_isfinite (&TREE_REAL_CST (arg0))))
9334 return omit_one_operand (type, arg0, arg1);
9336 /* If both arguments are constant, then try to evaluate it. */
9337 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9338 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9339 && host_integerp (arg1, 0))
9341 /* Bound the maximum adjustment to twice the range of the
9342 mode's valid exponents. Use abs to ensure the range is
9343 positive as a sanity check. */
9344 const long max_exp_adj = 2 *
9345 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9346 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9348 /* Get the user-requested adjustment. */
9349 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9351 /* The requested adjustment must be inside this range. This
9352 is a preliminary cap to avoid things like overflow; we
9353 may still fail to compute the result for other reasons. */
9354 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9356 REAL_VALUE_TYPE initial_result;
9358 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9360 /* Ensure we didn't overflow. */
9361 if (! real_isinf (&initial_result))
9363 const REAL_VALUE_TYPE trunc_result
9364 = real_value_truncate (TYPE_MODE (type), initial_result);
9366 /* Only proceed if the target mode can hold the
9367 resulting value. */
9368 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9369 return build_real (type, trunc_result);
9375 return NULL_TREE;
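/* Illustrative sketch (hypothetical calls, not from this file):

     ldexp (x, 0)     ->  x
     ldexp (0.0, n)   ->  0.0        (n still evaluated for side effects)
     ldexp (1.5, 3)   ->  12.0       (both operands constant, no overflow)

   scalbn/scalbln are folded the same way, except that the constant fold
   is only attempted when the type's radix is 2.  */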
9378 /* Fold a call to builtin modf. */
9380 static tree
9381 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9383 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9384 return NULL_TREE;
9386 STRIP_NOPS (arg0);
9388 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9389 return NULL_TREE;
9391 arg1 = build_fold_indirect_ref (arg1);
9393 /* Proceed if a valid pointer type was passed in. */
9394 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9396 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9397 REAL_VALUE_TYPE trunc, frac;
9399 switch (value->cl)
9401 case rvc_nan:
9402 case rvc_zero:
9403 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9404 trunc = frac = *value;
9405 break;
9406 case rvc_inf:
9407 /* For +-Inf, return (*arg1 = arg0, +-0). */
9408 frac = dconst0;
9409 frac.sign = value->sign;
9410 trunc = *value;
9411 break;
9412 case rvc_normal:
9413 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9414 real_trunc (&trunc, VOIDmode, value);
9415 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9416 /* If the original number was negative and already
9417 integral, then the fractional part is -0.0. */
9418 if (value->sign && frac.cl == rvc_zero)
9419 frac.sign = value->sign;
9420 break;
9423 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9424 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9425 build_real (rettype, trunc));
9426 TREE_SIDE_EFFECTS (arg1) = 1;
9427 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9428 build_real (rettype, frac));
9431 return NULL_TREE;
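/* Illustrative sketch (hypothetical calls, not from this file):

     double ip;
     __builtin_modf (2.5, &ip)    ->  (ip = 2.0, 0.5)
     __builtin_modf (-3.0, &ip)   ->  (ip = -3.0, -0.0)

   As with frexp above, the fold produces a COMPOUND_EXPR whose left
   operand is the store through the pointer argument.  */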
9434 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9435 ARG is the argument for the call; BUILTIN_INDEX selects the predicate. */
9437 static tree
9438 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9440 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9441 REAL_VALUE_TYPE r;
9443 if (!validate_arg (arg, REAL_TYPE))
9445 error ("non-floating-point argument to function %qs",
9446 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9447 return error_mark_node;
9450 switch (builtin_index)
9452 case BUILT_IN_ISINF:
9453 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9454 return omit_one_operand (type, integer_zero_node, arg);
9456 if (TREE_CODE (arg) == REAL_CST)
9458 r = TREE_REAL_CST (arg);
9459 if (real_isinf (&r))
9460 return real_compare (GT_EXPR, &r, &dconst0)
9461 ? integer_one_node : integer_minus_one_node;
9462 else
9463 return integer_zero_node;
9466 return NULL_TREE;
9468 case BUILT_IN_FINITE:
9469 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9470 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9471 return omit_one_operand (type, integer_one_node, arg);
9473 if (TREE_CODE (arg) == REAL_CST)
9475 r = TREE_REAL_CST (arg);
9476 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9479 return NULL_TREE;
9481 case BUILT_IN_ISNAN:
9482 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9483 return omit_one_operand (type, integer_zero_node, arg);
9485 if (TREE_CODE (arg) == REAL_CST)
9487 r = TREE_REAL_CST (arg);
9488 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9491 arg = builtin_save_expr (arg);
9492 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9494 default:
9495 gcc_unreachable ();
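/* Illustrative sketch (hypothetical calls, not from this file):

     __builtin_isnan (x)                  ->  x unord x    (UNORDERED_EXPR)
     __builtin_isnan (x)                  ->  0             if NaNs are not honored
     __builtin_isinf (__builtin_inf ())   ->  1             (-1 for -Inf, 0 otherwise)
     __builtin_finite (3.0)               ->  1

   so constant arguments, or NaN/Inf-free math modes, let these calls
   vanish entirely.  */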
9499 /* Fold a call to an unordered comparison function such as
9500 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9501 being called and ARG0 and ARG1 are the arguments for the call.
9502 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9503 the opposite of the desired result. UNORDERED_CODE is used
9504 for modes that can hold NaNs and ORDERED_CODE is used for
9505 the rest. */
9507 static tree
9508 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9509 enum tree_code unordered_code,
9510 enum tree_code ordered_code)
9512 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9513 enum tree_code code;
9514 tree type0, type1;
9515 enum tree_code code0, code1;
9516 tree cmp_type = NULL_TREE;
9518 type0 = TREE_TYPE (arg0);
9519 type1 = TREE_TYPE (arg1);
9521 code0 = TREE_CODE (type0);
9522 code1 = TREE_CODE (type1);
9524 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9525 /* Choose the wider of two real types. */
9526 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9527 ? type0 : type1;
9528 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9529 cmp_type = type0;
9530 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9531 cmp_type = type1;
9532 else
9534 error ("non-floating-point argument to function %qs",
9535 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9536 return error_mark_node;
9539 arg0 = fold_convert (cmp_type, arg0);
9540 arg1 = fold_convert (cmp_type, arg1);
9542 if (unordered_code == UNORDERED_EXPR)
9544 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9545 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9546 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9549 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9550 : ordered_code;
9551 return fold_build1 (TRUTH_NOT_EXPR, type,
9552 fold_build2 (code, type, arg0, arg1));
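/* Illustrative sketch (hypothetical calls, not from this file):

     isgreater (x, y)     ->  !(x unle y)   when NaNs are honored
     isgreater (x, y)     ->  !(x <= y)     with -ffinite-math-only
     isunordered (x, y)   ->  0             with -ffinite-math-only

   With two real operands the wider type is used for the comparison; an
   integer operand is first converted to the other operand's real type.  */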
9555 /* Fold a call to built-in function FNDECL with 0 arguments.
9556 IGNORE is true if the result of the function call is ignored. This
9557 function returns NULL_TREE if no simplification was possible. */
9559 static tree
9560 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9562 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9563 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9564 switch (fcode)
9566 CASE_FLT_FN (BUILT_IN_INF):
9567 case BUILT_IN_INFD32:
9568 case BUILT_IN_INFD64:
9569 case BUILT_IN_INFD128:
9570 return fold_builtin_inf (type, true);
9572 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9573 return fold_builtin_inf (type, false);
9575 case BUILT_IN_CLASSIFY_TYPE:
9576 return fold_builtin_classify_type (NULL_TREE);
9578 default:
9579 break;
9581 return NULL_TREE;
9584 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9585 IGNORE is true if the result of the function call is ignored. This
9586 function returns NULL_TREE if no simplification was possible. */
9588 static tree
9589 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9591 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9592 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9593 switch (fcode)
9596 case BUILT_IN_CONSTANT_P:
9598 tree val = fold_builtin_constant_p (arg0);
9600 /* Gimplification will pull the CALL_EXPR for the builtin out of
9601 an if condition. When not optimizing, we'll not CSE it back.
9602 To avoid regressions in the form of link errors, return false now. */
9603 if (!val && !optimize)
9604 val = integer_zero_node;
9606 return val;
9609 case BUILT_IN_CLASSIFY_TYPE:
9610 return fold_builtin_classify_type (arg0);
9612 case BUILT_IN_STRLEN:
9613 return fold_builtin_strlen (arg0);
9615 CASE_FLT_FN (BUILT_IN_FABS):
9616 return fold_builtin_fabs (arg0, type);
9618 case BUILT_IN_ABS:
9619 case BUILT_IN_LABS:
9620 case BUILT_IN_LLABS:
9621 case BUILT_IN_IMAXABS:
9622 return fold_builtin_abs (arg0, type);
9624 CASE_FLT_FN (BUILT_IN_CONJ):
9625 if (validate_arg (arg0, COMPLEX_TYPE))
9626 return fold_build1 (CONJ_EXPR, type, arg0);
9627 break;
9629 CASE_FLT_FN (BUILT_IN_CREAL):
9630 if (validate_arg (arg0, COMPLEX_TYPE))
9631 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
9632 break;
9634 CASE_FLT_FN (BUILT_IN_CIMAG):
9635 if (validate_arg (arg0, COMPLEX_TYPE))
9636 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9637 break;
9639 CASE_FLT_FN (BUILT_IN_CCOS):
9640 CASE_FLT_FN (BUILT_IN_CCOSH):
9641 /* These functions are "even", i.e. f(x) == f(-x). */
9642 if (validate_arg (arg0, COMPLEX_TYPE))
9644 tree narg = fold_strip_sign_ops (arg0);
9645 if (narg)
9646 return build_call_expr (fndecl, 1, narg);
9648 break;
9650 CASE_FLT_FN (BUILT_IN_CABS):
9651 return fold_builtin_cabs (arg0, type, fndecl);
9653 CASE_FLT_FN (BUILT_IN_CARG):
9654 return fold_builtin_carg (arg0, type);
9656 CASE_FLT_FN (BUILT_IN_SQRT):
9657 return fold_builtin_sqrt (arg0, type);
9659 CASE_FLT_FN (BUILT_IN_CBRT):
9660 return fold_builtin_cbrt (arg0, type);
9662 CASE_FLT_FN (BUILT_IN_ASIN):
9663 if (validate_arg (arg0, REAL_TYPE))
9664 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9665 &dconstm1, &dconst1, true);
9666 break;
9668 CASE_FLT_FN (BUILT_IN_ACOS):
9669 if (validate_arg (arg0, REAL_TYPE))
9670 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9671 &dconstm1, &dconst1, true);
9672 break;
9674 CASE_FLT_FN (BUILT_IN_ATAN):
9675 if (validate_arg (arg0, REAL_TYPE))
9676 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9677 break;
9679 CASE_FLT_FN (BUILT_IN_ASINH):
9680 if (validate_arg (arg0, REAL_TYPE))
9681 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9682 break;
9684 CASE_FLT_FN (BUILT_IN_ACOSH):
9685 if (validate_arg (arg0, REAL_TYPE))
9686 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9687 &dconst1, NULL, true);
9688 break;
9690 CASE_FLT_FN (BUILT_IN_ATANH):
9691 if (validate_arg (arg0, REAL_TYPE))
9692 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9693 &dconstm1, &dconst1, false);
9694 break;
9696 CASE_FLT_FN (BUILT_IN_SIN):
9697 if (validate_arg (arg0, REAL_TYPE))
9698 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9699 break;
9701 CASE_FLT_FN (BUILT_IN_COS):
9702 return fold_builtin_cos (arg0, type, fndecl);
9703 break;
9705 CASE_FLT_FN (BUILT_IN_TAN):
9706 return fold_builtin_tan (arg0, type);
9708 CASE_FLT_FN (BUILT_IN_CEXP):
9709 return fold_builtin_cexp (arg0, type);
9711 CASE_FLT_FN (BUILT_IN_CEXPI):
9712 if (validate_arg (arg0, REAL_TYPE))
9713 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9714 break;
9716 CASE_FLT_FN (BUILT_IN_SINH):
9717 if (validate_arg (arg0, REAL_TYPE))
9718 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9719 break;
9721 CASE_FLT_FN (BUILT_IN_COSH):
9722 return fold_builtin_cosh (arg0, type, fndecl);
9724 CASE_FLT_FN (BUILT_IN_TANH):
9725 if (validate_arg (arg0, REAL_TYPE))
9726 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9727 break;
9729 CASE_FLT_FN (BUILT_IN_ERF):
9730 if (validate_arg (arg0, REAL_TYPE))
9731 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9732 break;
9734 CASE_FLT_FN (BUILT_IN_ERFC):
9735 if (validate_arg (arg0, REAL_TYPE))
9736 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9737 break;
9739 CASE_FLT_FN (BUILT_IN_TGAMMA):
9740 if (validate_arg (arg0, REAL_TYPE))
9741 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9742 break;
9744 CASE_FLT_FN (BUILT_IN_EXP):
9745 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9747 CASE_FLT_FN (BUILT_IN_EXP2):
9748 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9750 CASE_FLT_FN (BUILT_IN_EXP10):
9751 CASE_FLT_FN (BUILT_IN_POW10):
9752 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9754 CASE_FLT_FN (BUILT_IN_EXPM1):
9755 if (validate_arg (arg0, REAL_TYPE))
9756 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9757 break;
9759 CASE_FLT_FN (BUILT_IN_LOG):
9760 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9762 CASE_FLT_FN (BUILT_IN_LOG2):
9763 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9765 CASE_FLT_FN (BUILT_IN_LOG10):
9766 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9768 CASE_FLT_FN (BUILT_IN_LOG1P):
9769 if (validate_arg (arg0, REAL_TYPE))
9770 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9771 &dconstm1, NULL, false);
9772 break;
9774 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9775 CASE_FLT_FN (BUILT_IN_J0):
9776 if (validate_arg (arg0, REAL_TYPE))
9777 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9778 NULL, NULL, 0);
9779 break;
9781 CASE_FLT_FN (BUILT_IN_J1):
9782 if (validate_arg (arg0, REAL_TYPE))
9783 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9784 NULL, NULL, 0);
9785 break;
9787 CASE_FLT_FN (BUILT_IN_Y0):
9788 if (validate_arg (arg0, REAL_TYPE))
9789 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9790 &dconst0, NULL, false);
9791 break;
9793 CASE_FLT_FN (BUILT_IN_Y1):
9794 if (validate_arg (arg0, REAL_TYPE))
9795 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9796 &dconst0, NULL, false);
9797 break;
9798 #endif
9800 CASE_FLT_FN (BUILT_IN_NAN):
9801 case BUILT_IN_NAND32:
9802 case BUILT_IN_NAND64:
9803 case BUILT_IN_NAND128:
9804 return fold_builtin_nan (arg0, type, true);
9806 CASE_FLT_FN (BUILT_IN_NANS):
9807 return fold_builtin_nan (arg0, type, false);
9809 CASE_FLT_FN (BUILT_IN_FLOOR):
9810 return fold_builtin_floor (fndecl, arg0);
9812 CASE_FLT_FN (BUILT_IN_CEIL):
9813 return fold_builtin_ceil (fndecl, arg0);
9815 CASE_FLT_FN (BUILT_IN_TRUNC):
9816 return fold_builtin_trunc (fndecl, arg0);
9818 CASE_FLT_FN (BUILT_IN_ROUND):
9819 return fold_builtin_round (fndecl, arg0);
9821 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9822 CASE_FLT_FN (BUILT_IN_RINT):
9823 return fold_trunc_transparent_mathfn (fndecl, arg0);
9825 CASE_FLT_FN (BUILT_IN_LCEIL):
9826 CASE_FLT_FN (BUILT_IN_LLCEIL):
9827 CASE_FLT_FN (BUILT_IN_LFLOOR):
9828 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9829 CASE_FLT_FN (BUILT_IN_LROUND):
9830 CASE_FLT_FN (BUILT_IN_LLROUND):
9831 return fold_builtin_int_roundingfn (fndecl, arg0);
9833 CASE_FLT_FN (BUILT_IN_LRINT):
9834 CASE_FLT_FN (BUILT_IN_LLRINT):
9835 return fold_fixed_mathfn (fndecl, arg0);
9837 case BUILT_IN_BSWAP32:
9838 case BUILT_IN_BSWAP64:
9839 return fold_builtin_bswap (fndecl, arg0);
9841 CASE_INT_FN (BUILT_IN_FFS):
9842 CASE_INT_FN (BUILT_IN_CLZ):
9843 CASE_INT_FN (BUILT_IN_CTZ):
9844 CASE_INT_FN (BUILT_IN_POPCOUNT):
9845 CASE_INT_FN (BUILT_IN_PARITY):
9846 return fold_builtin_bitop (fndecl, arg0);
9848 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9849 return fold_builtin_signbit (arg0, type);
9851 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9852 return fold_builtin_significand (arg0, type);
9854 CASE_FLT_FN (BUILT_IN_ILOGB):
9855 CASE_FLT_FN (BUILT_IN_LOGB):
9856 return fold_builtin_logb (arg0, type);
9858 case BUILT_IN_ISASCII:
9859 return fold_builtin_isascii (arg0);
9861 case BUILT_IN_TOASCII:
9862 return fold_builtin_toascii (arg0);
9864 case BUILT_IN_ISDIGIT:
9865 return fold_builtin_isdigit (arg0);
9867 CASE_FLT_FN (BUILT_IN_FINITE):
9868 case BUILT_IN_FINITED32:
9869 case BUILT_IN_FINITED64:
9870 case BUILT_IN_FINITED128:
9871 return fold_builtin_classify (fndecl, arg0, BUILT_IN_FINITE);
9873 CASE_FLT_FN (BUILT_IN_ISINF):
9874 case BUILT_IN_ISINFD32:
9875 case BUILT_IN_ISINFD64:
9876 case BUILT_IN_ISINFD128:
9877 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
9879 CASE_FLT_FN (BUILT_IN_ISNAN):
9880 case BUILT_IN_ISNAND32:
9881 case BUILT_IN_ISNAND64:
9882 case BUILT_IN_ISNAND128:
9883 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
9885 case BUILT_IN_PRINTF:
9886 case BUILT_IN_PRINTF_UNLOCKED:
9887 case BUILT_IN_VPRINTF:
9888 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
9890 default:
9891 break;
9894 return NULL_TREE;
9898 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9899 IGNORE is true if the result of the function call is ignored. This
9900 function returns NULL_TREE if no simplification was possible. */
9902 static tree
9903 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
9905 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9906 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9908 switch (fcode)
9910 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9911 CASE_FLT_FN (BUILT_IN_JN):
9912 if (validate_arg (arg0, INTEGER_TYPE)
9913 && validate_arg (arg1, REAL_TYPE))
9914 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9915 break;
9917 CASE_FLT_FN (BUILT_IN_YN):
9918 if (validate_arg (arg0, INTEGER_TYPE)
9919 && validate_arg (arg1, REAL_TYPE))
9920 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9921 &dconst0, false);
9922 break;
9924 CASE_FLT_FN (BUILT_IN_DREM):
9925 CASE_FLT_FN (BUILT_IN_REMAINDER):
9926 if (validate_arg (arg0, REAL_TYPE)
9927 && validate_arg (arg1, REAL_TYPE))
9928 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9929 break;
9931 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9932 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9933 if (validate_arg (arg0, REAL_TYPE)
9934 && validate_arg (arg1, POINTER_TYPE))
9935 return do_mpfr_lgamma_r (arg0, arg1, type);
9936 break;
9937 #endif
9939 CASE_FLT_FN (BUILT_IN_ATAN2):
9940 if (validate_arg (arg0, REAL_TYPE)
9941 && validate_arg (arg1, REAL_TYPE))
9942 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9943 break;
9945 CASE_FLT_FN (BUILT_IN_FDIM):
9946 if (validate_arg (arg0, REAL_TYPE)
9947 && validate_arg (arg1, REAL_TYPE))
9948 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9949 break;
9951 CASE_FLT_FN (BUILT_IN_HYPOT):
9952 return fold_builtin_hypot (fndecl, arg0, arg1, type);
9954 CASE_FLT_FN (BUILT_IN_LDEXP):
9955 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
9956 CASE_FLT_FN (BUILT_IN_SCALBN):
9957 CASE_FLT_FN (BUILT_IN_SCALBLN):
9958 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
9960 CASE_FLT_FN (BUILT_IN_FREXP):
9961 return fold_builtin_frexp (arg0, arg1, type);
9963 CASE_FLT_FN (BUILT_IN_MODF):
9964 return fold_builtin_modf (arg0, arg1, type);
9966 case BUILT_IN_BZERO:
9967 return fold_builtin_bzero (arg0, arg1, ignore);
9969 case BUILT_IN_FPUTS:
9970 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
9972 case BUILT_IN_FPUTS_UNLOCKED:
9973 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
9975 case BUILT_IN_STRSTR:
9976 return fold_builtin_strstr (arg0, arg1, type);
9978 case BUILT_IN_STRCAT:
9979 return fold_builtin_strcat (arg0, arg1);
9981 case BUILT_IN_STRSPN:
9982 return fold_builtin_strspn (arg0, arg1);
9984 case BUILT_IN_STRCSPN:
9985 return fold_builtin_strcspn (arg0, arg1);
9987 case BUILT_IN_STRCHR:
9988 case BUILT_IN_INDEX:
9989 return fold_builtin_strchr (arg0, arg1, type);
9991 case BUILT_IN_STRRCHR:
9992 case BUILT_IN_RINDEX:
9993 return fold_builtin_strrchr (arg0, arg1, type);
9995 case BUILT_IN_STRCPY:
9996 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
9998 case BUILT_IN_STRCMP:
9999 return fold_builtin_strcmp (arg0, arg1);
10001 case BUILT_IN_STRPBRK:
10002 return fold_builtin_strpbrk (arg0, arg1, type);
10004 case BUILT_IN_EXPECT:
10005 return fold_builtin_expect (arg0);
10007 CASE_FLT_FN (BUILT_IN_POW):
10008 return fold_builtin_pow (fndecl, arg0, arg1, type);
10010 CASE_FLT_FN (BUILT_IN_POWI):
10011 return fold_builtin_powi (fndecl, arg0, arg1, type);
10013 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10014 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10016 CASE_FLT_FN (BUILT_IN_FMIN):
10017 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10019 CASE_FLT_FN (BUILT_IN_FMAX):
10020 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10022 case BUILT_IN_ISGREATER:
10023 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10024 case BUILT_IN_ISGREATEREQUAL:
10025 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10026 case BUILT_IN_ISLESS:
10027 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10028 case BUILT_IN_ISLESSEQUAL:
10029 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10030 case BUILT_IN_ISLESSGREATER:
10031 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10032 case BUILT_IN_ISUNORDERED:
10033 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10034 NOP_EXPR);
10036 /* We do the folding for va_start in the expander. */
10037 case BUILT_IN_VA_START:
10038 break;
10040 case BUILT_IN_SPRINTF:
10041 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10043 case BUILT_IN_OBJECT_SIZE:
10044 return fold_builtin_object_size (arg0, arg1);
10046 case BUILT_IN_PRINTF:
10047 case BUILT_IN_PRINTF_UNLOCKED:
10048 case BUILT_IN_VPRINTF:
10049 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10051 case BUILT_IN_PRINTF_CHK:
10052 case BUILT_IN_VPRINTF_CHK:
10053 if (!validate_arg (arg0, INTEGER_TYPE)
10054 || TREE_SIDE_EFFECTS (arg0))
10055 return NULL_TREE;
10056 else
10057 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10058 break;
10060 case BUILT_IN_FPRINTF:
10061 case BUILT_IN_FPRINTF_UNLOCKED:
10062 case BUILT_IN_VFPRINTF:
10063 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10064 ignore, fcode);
10066 default:
10067 break;
10069 return NULL_TREE;
10072 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10073 and ARG2. IGNORE is true if the result of the function call is ignored.
10074 This function returns NULL_TREE if no simplification was possible. */
10076 static tree
10077 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10079 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10080 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10081 switch (fcode)
10084 CASE_FLT_FN (BUILT_IN_SINCOS):
10085 return fold_builtin_sincos (arg0, arg1, arg2);
10087 CASE_FLT_FN (BUILT_IN_FMA):
10088 if (validate_arg (arg0, REAL_TYPE)
10089 && validate_arg (arg1, REAL_TYPE)
10090 && validate_arg (arg2, REAL_TYPE))
10091 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10092 break;
10094 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10095 CASE_FLT_FN (BUILT_IN_REMQUO):
10096 if (validate_arg (arg0, REAL_TYPE)
10097 && validate_arg (arg1, REAL_TYPE)
10098 && validate_arg (arg2, POINTER_TYPE))
10099 return do_mpfr_remquo (arg0, arg1, arg2);
10100 break;
10101 #endif
10103 case BUILT_IN_MEMSET:
10104 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10106 case BUILT_IN_BCOPY:
10107 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10109 case BUILT_IN_MEMCPY:
10110 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10112 case BUILT_IN_MEMPCPY:
10113 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10115 case BUILT_IN_MEMMOVE:
10116 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10118 case BUILT_IN_STRNCAT:
10119 return fold_builtin_strncat (arg0, arg1, arg2);
10121 case BUILT_IN_STRNCPY:
10122 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10124 case BUILT_IN_STRNCMP:
10125 return fold_builtin_strncmp (arg0, arg1, arg2);
10127 case BUILT_IN_MEMCHR:
10128 return fold_builtin_memchr (arg0, arg1, arg2, type);
10130 case BUILT_IN_BCMP:
10131 case BUILT_IN_MEMCMP:
10132 return fold_builtin_memcmp (arg0, arg1, arg2);
10134 case BUILT_IN_SPRINTF:
10135 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10137 case BUILT_IN_STRCPY_CHK:
10138 case BUILT_IN_STPCPY_CHK:
10139 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10140 ignore, fcode);
10142 case BUILT_IN_STRCAT_CHK:
10143 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10145 case BUILT_IN_PRINTF_CHK:
10146 case BUILT_IN_VPRINTF_CHK:
10147 if (!validate_arg (arg0, INTEGER_TYPE)
10148 || TREE_SIDE_EFFECTS (arg0))
10149 return NULL_TREE;
10150 else
10151 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10152 break;
10154 case BUILT_IN_FPRINTF:
10155 case BUILT_IN_FPRINTF_UNLOCKED:
10156 case BUILT_IN_VFPRINTF:
10157 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10159 case BUILT_IN_FPRINTF_CHK:
10160 case BUILT_IN_VFPRINTF_CHK:
10161 if (!validate_arg (arg1, INTEGER_TYPE)
10162 || TREE_SIDE_EFFECTS (arg1))
10163 return NULL_TREE;
10164 else
10165 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10166 ignore, fcode);
10168 default:
10169 break;
10171 return NULL_TREE;
10174 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10175 ARG2, and ARG3. IGNORE is true if the result of the function call is
10176 ignored. This function returns NULL_TREE if no simplification was
10177 possible. */
10179 static tree
10180 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10181 bool ignore)
10183 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10185 switch (fcode)
10187 case BUILT_IN_MEMCPY_CHK:
10188 case BUILT_IN_MEMPCPY_CHK:
10189 case BUILT_IN_MEMMOVE_CHK:
10190 case BUILT_IN_MEMSET_CHK:
10191 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10192 NULL_TREE, ignore,
10193 DECL_FUNCTION_CODE (fndecl));
10195 case BUILT_IN_STRNCPY_CHK:
10196 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10198 case BUILT_IN_STRNCAT_CHK:
10199 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10201 case BUILT_IN_FPRINTF_CHK:
10202 case BUILT_IN_VFPRINTF_CHK:
10203 if (!validate_arg (arg1, INTEGER_TYPE)
10204 || TREE_SIDE_EFFECTS (arg1))
10205 return NULL_TREE;
10206 else
10207 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10208 ignore, fcode);
10209 break;
10211 default:
10212 break;
10214 return NULL_TREE;
10217 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10218 arguments, where NARGS <= 4. IGNORE is true if the result of the
10219 function call is ignored. This function returns NULL_TREE if no
10220 simplification was possible. Note that this only folds builtins with
10221 fixed argument patterns. Foldings that do varargs-to-varargs
10222 transformations, or that match calls with more than 4 arguments,
10223 need to be handled with fold_builtin_varargs instead. */
10225 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10227 static tree
10228 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10230 tree ret = NULL_TREE;
10231 switch (nargs)
10233 case 0:
10234 ret = fold_builtin_0 (fndecl, ignore);
10235 break;
10236 case 1:
10237 ret = fold_builtin_1 (fndecl, args[0], ignore);
10238 break;
10239 case 2:
10240 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10241 break;
10242 case 3:
10243 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10244 break;
10245 case 4:
10246 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10247 ignore);
10248 break;
10249 default:
10250 break;
10252 if (ret)
10254 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10255 TREE_NO_WARNING (ret) = 1;
10256 return ret;
10258 return NULL_TREE;
10261 /* Builtins with folding operations that operate on "..." arguments
10262 need special handling; we need to store the arguments in a convenient
10263 data structure before attempting any folding. Fortunately there are
10264 only a few builtins that fall into this category. FNDECL is the
10265 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10266 result of the function call is ignored. */
10268 static tree
10269 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10271 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10272 tree ret = NULL_TREE;
10274 switch (fcode)
10276 case BUILT_IN_SPRINTF_CHK:
10277 case BUILT_IN_VSPRINTF_CHK:
10278 ret = fold_builtin_sprintf_chk (exp, fcode);
10279 break;
10281 case BUILT_IN_SNPRINTF_CHK:
10282 case BUILT_IN_VSNPRINTF_CHK:
10283 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10285 default:
10286 break;
10288 if (ret)
10290 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10291 TREE_NO_WARNING (ret) = 1;
10292 return ret;
10294 return NULL_TREE;
10297 /* A wrapper function for builtin folding that prevents warnings for
10298 "statement without effect" and the like, caused by removing the
10299 call node earlier than the warning is generated. */
10301 tree
10302 fold_call_expr (tree exp, bool ignore)
10304 tree ret = NULL_TREE;
10305 tree fndecl = get_callee_fndecl (exp);
10306 if (fndecl
10307 && TREE_CODE (fndecl) == FUNCTION_DECL
10308 && DECL_BUILT_IN (fndecl))
10310 /* FIXME: Don't use a list in this interface. */
10311 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10312 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10313 else
10315 int nargs = call_expr_nargs (exp);
10316 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10318 tree *args = CALL_EXPR_ARGP (exp);
10319 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10321 if (!ret)
10322 ret = fold_builtin_varargs (fndecl, exp, ignore);
10323 if (ret)
10325 /* Propagate location information from the original call to the
10326 expansion of the builtin. Otherwise things like
10327 maybe_emit_chk_warning, which operate on the expansion
10328 of a builtin, will use the wrong location information. */
10329 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10331 tree realret = ret;
10332 if (TREE_CODE (ret) == NOP_EXPR)
10333 realret = TREE_OPERAND (ret, 0);
10334 if (CAN_HAVE_LOCATION_P (realret)
10335 && !EXPR_HAS_LOCATION (realret))
10336 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10338 return ret;
10342 return NULL_TREE;
10345 /* Conveniently construct a function call expression. FNDECL names the
10346 function to be called and ARGLIST is a TREE_LIST of arguments. */
10348 tree
10349 build_function_call_expr (tree fndecl, tree arglist)
10351 tree fntype = TREE_TYPE (fndecl);
10352 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10353 int n = list_length (arglist);
10354 tree *argarray = (tree *) alloca (n * sizeof (tree));
10355 int i;
10357 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10358 argarray[i] = TREE_VALUE (arglist);
10359 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10362 /* Conveniently construct a function call expression. FNDECL names the
10363 function to be called, N is the number of arguments, and the "..."
10364 parameters are the argument expressions. */
10366 tree
10367 build_call_expr (tree fndecl, int n, ...)
10369 va_list ap;
10370 tree fntype = TREE_TYPE (fndecl);
10371 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10372 tree *argarray = (tree *) alloca (n * sizeof (tree));
10373 int i;
10375 va_start (ap, n);
10376 for (i = 0; i < n; i++)
10377 argarray[i] = va_arg (ap, tree);
10378 va_end (ap);
10379 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
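#if 0
/* Sketch only (hypothetical helper, not part of this file): how the folders
   in this file typically use build_call_expr, here building a two-argument
   call to the implicit strchr declaration.  The example_* name is
   illustrative.  */
static tree
example_build_strchr_call (tree s, tree c)
{
  tree fn = implicit_built_in_decls[BUILT_IN_STRCHR];
  return fn ? build_call_expr (fn, 2, s, c) : NULL_TREE;
}
#endif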
10382 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10383 N arguments are passed in the array ARGARRAY. */
10385 tree
10386 fold_builtin_call_array (tree type,
10387 tree fn,
10388 int n,
10389 tree *argarray)
10391 tree ret = NULL_TREE;
10392 int i;
10393 tree exp;
10395 if (TREE_CODE (fn) == ADDR_EXPR)
10397 tree fndecl = TREE_OPERAND (fn, 0);
10398 if (TREE_CODE (fndecl) == FUNCTION_DECL
10399 && DECL_BUILT_IN (fndecl))
10401 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10403 tree arglist = NULL_TREE;
10404 for (i = n - 1; i >= 0; i--)
10405 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10406 ret = targetm.fold_builtin (fndecl, arglist, false);
10407 if (ret)
10408 return ret;
10410 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10412 /* First try the transformations that don't require consing up
10413 an exp. */
10414 ret = fold_builtin_n (fndecl, argarray, n, false);
10415 if (ret)
10416 return ret;
10419 /* If we got this far, we need to build an exp. */
10420 exp = build_call_array (type, fn, n, argarray);
10421 ret = fold_builtin_varargs (fndecl, exp, false);
10422 return ret ? ret : exp;
10426 return build_call_array (type, fn, n, argarray);
10429 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10430 along with N new arguments specified as the "..." parameters. SKIP
10431 is the number of arguments in EXP to be omitted. This function is used
10432 to do varargs-to-varargs transformations. */
10434 static tree
10435 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10437 int oldnargs = call_expr_nargs (exp);
10438 int nargs = oldnargs - skip + n;
10439 tree fntype = TREE_TYPE (fndecl);
10440 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10441 tree *buffer;
10443 if (n > 0)
10445 int i, j;
10446 va_list ap;
10448 buffer = (tree *) alloca (nargs * sizeof (tree));
10449 va_start (ap, n);
10450 for (i = 0; i < n; i++)
10451 buffer[i] = va_arg (ap, tree);
10452 va_end (ap);
10453 for (j = skip; j < oldnargs; j++, i++)
10454 buffer[i] = CALL_EXPR_ARG (exp, j);
10456 else
10457 buffer = CALL_EXPR_ARGP (exp) + skip;
10459 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10462 /* Validate a single argument ARG against a tree code CODE representing
10463 a type. */
10465 static bool
10466 validate_arg (tree arg, enum tree_code code)
10468 if (!arg)
10469 return false;
10470 else if (code == POINTER_TYPE)
10471 return POINTER_TYPE_P (TREE_TYPE (arg));
10472 return code == TREE_CODE (TREE_TYPE (arg));
10475 /* This function validates the types of a function call argument list
10476 against a specified list of tree_codes. If the last specifier is a 0,
10477 that represents an ellipsis; otherwise the last specifier must be a
10478 VOID_TYPE. */
10480 bool
10481 validate_arglist (tree callexpr, ...)
10483 enum tree_code code;
10484 bool res = false;
10485 va_list ap;
10486 call_expr_arg_iterator iter;
10487 tree arg;
10489 va_start (ap, callexpr);
10490 init_call_expr_arg_iterator (callexpr, &iter);
10494 code = va_arg (ap, enum tree_code);
10495 switch (code)
10497 case 0:
10498 /* This signifies an ellipsis; any further arguments are all ok. */
10499 res = true;
10500 goto end;
10501 case VOID_TYPE:
10502 /* This signifies an endlink, if no arguments remain, return
10503 true, otherwise return false. */
10504 res = !more_call_expr_args_p (&iter);
10505 goto end;
10506 default:
10507 /* If no parameters remain or the parameter's code does not
10508 match the specified code, return false. Otherwise continue
10509 checking any remaining arguments. */
10510 arg = next_call_expr_arg (&iter);
10511 if (!validate_arg (arg, code))
10512 goto end;
10513 break;
10516 while (1);
10518 /* We need gotos here since we can only have one VA_CLOSE in a
10519 function. */
10520 end: ;
10521 va_end (ap);
10523 return res;
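#if 0
/* Sketch only (hypothetical helper, not part of this file): the usual way
   callers check a CALL_EXPR's signature with validate_arglist, here "two
   pointer arguments and nothing else".  A trailing 0 instead of VOID_TYPE
   would mean "any further arguments are acceptable".  The example_* name
   is illustrative.  */
static bool
example_check_two_pointer_args (tree exp)
{
  return validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE);
}
#endif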
10526 /* Default target-specific builtin expander that does nothing. */
rtx
10529 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10530 rtx target ATTRIBUTE_UNUSED,
10531 rtx subtarget ATTRIBUTE_UNUSED,
10532 enum machine_mode mode ATTRIBUTE_UNUSED,
10533 int ignore ATTRIBUTE_UNUSED)
10535 return NULL_RTX;
10538 /* Returns true if EXP represents data that would potentially reside
10539 in a readonly section. */
10541 static bool
10542 readonly_data_expr (tree exp)
10544 STRIP_NOPS (exp);
10546 if (TREE_CODE (exp) != ADDR_EXPR)
10547 return false;
10549 exp = get_base_address (TREE_OPERAND (exp, 0));
10550 if (!exp)
10551 return false;
10553 /* Make sure we call decl_readonly_section only for trees it
10554 can handle (since it returns true for everything it doesn't
10555 understand). */
10556 if (TREE_CODE (exp) == STRING_CST
10557 || TREE_CODE (exp) == CONSTRUCTOR
10558 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10559 return decl_readonly_section (exp, 0);
10560 else
10561 return false;
10564 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10565 to the call, and TYPE is its return type.
10567 Return NULL_TREE if no simplification was possible, otherwise return the
10568 simplified form of the call as a tree.
10570 The simplified form may be a constant or other expression which
10571 computes the same value, but in a more efficient manner (including
10572 calls to other builtin functions).
10574 The call may contain arguments which need to be evaluated, but
10575 which are not useful to determine the result of the call. In
10576 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10577 COMPOUND_EXPR will be an argument which must be evaluated.
10578 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10579 COMPOUND_EXPR in the chain will contain the tree for the simplified
10580 form of the builtin function call. */
10582 static tree
10583 fold_builtin_strstr (tree s1, tree s2, tree type)
10585 if (!validate_arg (s1, POINTER_TYPE)
10586 || !validate_arg (s2, POINTER_TYPE))
10587 return NULL_TREE;
10588 else
10590 tree fn;
10591 const char *p1, *p2;
10593 p2 = c_getstr (s2);
10594 if (p2 == NULL)
10595 return NULL_TREE;
10597 p1 = c_getstr (s1);
10598 if (p1 != NULL)
10600 const char *r = strstr (p1, p2);
10601 tree tem;
10603 if (r == NULL)
10604 return build_int_cst (TREE_TYPE (s1), 0);
10606 /* Return an offset into the constant string argument. */
10607 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10608 s1, size_int (r - p1));
10609 return fold_convert (type, tem);
10612 /* The argument is const char *, and the result is char *, so we need
10613 a type conversion here to avoid a warning. */
10614 if (p2[0] == '\0')
10615 return fold_convert (type, s1);
10617 if (p2[1] != '\0')
10618 return NULL_TREE;
10620 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10621 if (!fn)
10622 return NULL_TREE;
10624 /* New argument list transforming strstr(s1, s2) to
10625 strchr(s1, s2[0]). */
10626 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
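/* Illustrative sketch (hypothetical calls, not from this file):

     strstr (s, "")          ->  (char *) s
     strstr (s, "c")         ->  strchr (s, 'c')
     strstr ("hello", "ll")  ->  "hello" + 2     (offset into the constant)

   Non-constant needles, or longer needles with a non-constant haystack,
   are left for the library.  */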
10630 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10631 the call, and TYPE is its return type.
10633 Return NULL_TREE if no simplification was possible, otherwise return the
10634 simplified form of the call as a tree.
10636 The simplified form may be a constant or other expression which
10637 computes the same value, but in a more efficient manner (including
10638 calls to other builtin functions).
10640 The call may contain arguments which need to be evaluated, but
10641 which are not useful to determine the result of the call. In
10642 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10643 COMPOUND_EXPR will be an argument which must be evaluated.
10644 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10645 COMPOUND_EXPR in the chain will contain the tree for the simplified
10646 form of the builtin function call. */
10648 static tree
10649 fold_builtin_strchr (tree s1, tree s2, tree type)
10651 if (!validate_arg (s1, POINTER_TYPE)
10652 || !validate_arg (s2, INTEGER_TYPE))
10653 return NULL_TREE;
10654 else
10656 const char *p1;
10658 if (TREE_CODE (s2) != INTEGER_CST)
10659 return NULL_TREE;
10661 p1 = c_getstr (s1);
10662 if (p1 != NULL)
10664 char c;
10665 const char *r;
10666 tree tem;
10668 if (target_char_cast (s2, &c))
10669 return NULL_TREE;
10671 r = strchr (p1, c);
10673 if (r == NULL)
10674 return build_int_cst (TREE_TYPE (s1), 0);
10676 /* Return an offset into the constant string argument. */
10677 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10678 s1, size_int (r - p1));
10679 return fold_convert (type, tem);
10681 return NULL_TREE;
10685 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10686 the call, and TYPE is its return type.
10688 Return NULL_TREE if no simplification was possible, otherwise return the
10689 simplified form of the call as a tree.
10691 The simplified form may be a constant or other expression which
10692 computes the same value, but in a more efficient manner (including
10693 calls to other builtin functions).
10695 The call may contain arguments which need to be evaluated, but
10696 which are not useful to determine the result of the call. In
10697 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10698 COMPOUND_EXPR will be an argument which must be evaluated.
10699 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10700 COMPOUND_EXPR in the chain will contain the tree for the simplified
10701 form of the builtin function call. */
10703 static tree
10704 fold_builtin_strrchr (tree s1, tree s2, tree type)
10706 if (!validate_arg (s1, POINTER_TYPE)
10707 || !validate_arg (s2, INTEGER_TYPE))
10708 return NULL_TREE;
10709 else
10711 tree fn;
10712 const char *p1;
10714 if (TREE_CODE (s2) != INTEGER_CST)
10715 return NULL_TREE;
10717 p1 = c_getstr (s1);
10718 if (p1 != NULL)
10720 char c;
10721 const char *r;
10722 tree tem;
10724 if (target_char_cast (s2, &c))
10725 return NULL_TREE;
10727 r = strrchr (p1, c);
10729 if (r == NULL)
10730 return build_int_cst (TREE_TYPE (s1), 0);
10732 /* Return an offset into the constant string argument. */
10733 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10734 s1, size_int (r - p1));
10735 return fold_convert (type, tem);
10738 if (! integer_zerop (s2))
10739 return NULL_TREE;
10741 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10742 if (!fn)
10743 return NULL_TREE;
10745 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10746 return build_call_expr (fn, 2, s1, s2);
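/* Illustrative sketch (hypothetical calls, not from this file):

     strrchr ("abcb", 'b')  ->  "abcb" + 3
     strrchr (s, '\0')      ->  strchr (s, '\0')

   A non-constant string searched for a non-zero character is left for
   the library.  */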
10750 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10751 to the call, and TYPE is its return type.
10753 Return NULL_TREE if no simplification was possible, otherwise return the
10754 simplified form of the call as a tree.
10756 The simplified form may be a constant or other expression which
10757 computes the same value, but in a more efficient manner (including
10758 calls to other builtin functions).
10760 The call may contain arguments which need to be evaluated, but
10761 which are not useful to determine the result of the call. In
10762 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10763 COMPOUND_EXPR will be an argument which must be evaluated.
10764 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10765 COMPOUND_EXPR in the chain will contain the tree for the simplified
10766 form of the builtin function call. */
10768 static tree
10769 fold_builtin_strpbrk (tree s1, tree s2, tree type)
10771 if (!validate_arg (s1, POINTER_TYPE)
10772 || !validate_arg (s2, POINTER_TYPE))
10773 return NULL_TREE;
10774 else
10776 tree fn;
10777 const char *p1, *p2;
10779 p2 = c_getstr (s2);
10780 if (p2 == NULL)
10781 return NULL_TREE;
10783 p1 = c_getstr (s1);
10784 if (p1 != NULL)
10786 const char *r = strpbrk (p1, p2);
10787 tree tem;
10789 if (r == NULL)
10790 return build_int_cst (TREE_TYPE (s1), 0);
10792 /* Return an offset into the constant string argument. */
10793 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10794 s1, size_int (r - p1));
10795 return fold_convert (type, tem);
10798 if (p2[0] == '\0')
10799 /* strpbrk(x, "") == NULL.
10800 Evaluate and ignore s1 in case it had side-effects. */
10801 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
10803 if (p2[1] != '\0')
10804 return NULL_TREE; /* Really call strpbrk. */
10806 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10807 if (!fn)
10808 return NULL_TREE;
10810 /* New argument list transforming strpbrk(s1, s2) to
10811 strchr(s1, s2[0]). */
10812 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
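/* Illustrative sketch (hypothetical calls, not from this file):

     strpbrk (s, "")        ->  (char *) 0      (s still evaluated)
     strpbrk (s, "c")       ->  strchr (s, 'c')
     strpbrk ("abc", "cb")  ->  "abc" + 1

   mirroring the strstr folding earlier in this file.  */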
10816 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10817 to the call.
10819 Return NULL_TREE if no simplification was possible, otherwise return the
10820 simplified form of the call as a tree.
10822 The simplified form may be a constant or other expression which
10823 computes the same value, but in a more efficient manner (including
10824 calls to other builtin functions).
10826 The call may contain arguments which need to be evaluated, but
10827 which are not useful to determine the result of the call. In
10828 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10829 COMPOUND_EXPR will be an argument which must be evaluated.
10830 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10831 COMPOUND_EXPR in the chain will contain the tree for the simplified
10832 form of the builtin function call. */
10834 static tree
10835 fold_builtin_strcat (tree dst, tree src)
10837 if (!validate_arg (dst, POINTER_TYPE)
10838 || !validate_arg (src, POINTER_TYPE))
10839 return NULL_TREE;
10840 else
10842 const char *p = c_getstr (src);
10844 /* If the string length is zero, return the dst parameter. */
10845 if (p && *p == '\0')
10846 return dst;
10848 return NULL_TREE;
10852 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10853 arguments to the call.
10855 Return NULL_TREE if no simplification was possible, otherwise return the
10856 simplified form of the call as a tree.
10858 The simplified form may be a constant or other expression which
10859 computes the same value, but in a more efficient manner (including
10860 calls to other builtin functions).
10862 The call may contain arguments which need to be evaluated, but
10863 which are not useful to determine the result of the call. In
10864 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10865 COMPOUND_EXPR will be an argument which must be evaluated.
10866 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10867 COMPOUND_EXPR in the chain will contain the tree for the simplified
10868 form of the builtin function call. */
10870 static tree
10871 fold_builtin_strncat (tree dst, tree src, tree len)
10873 if (!validate_arg (dst, POINTER_TYPE)
10874 || !validate_arg (src, POINTER_TYPE)
10875 || !validate_arg (len, INTEGER_TYPE))
10876 return NULL_TREE;
10877 else
10879 const char *p = c_getstr (src);
10881 /* If the requested length is zero, or the src parameter string
10882 length is zero, return the dst parameter. */
10883 if (integer_zerop (len) || (p && *p == '\0'))
10884 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
10886 /* If the requested len is greater than or equal to the string
10887 length, call strcat. */
10888 if (TREE_CODE (len) == INTEGER_CST && p
10889 && compare_tree_int (len, strlen (p)) >= 0)
10891 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
10893 /* If the replacement _DECL isn't initialized, don't do the
10894 transformation. */
10895 if (!fn)
10896 return NULL_TREE;
10898 return build_call_expr (fn, 2, dst, src);
10900 return NULL_TREE;
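/* Illustrative sketch (hypothetical calls, not from this file):

     strncat (d, s, 0)     ->  d                    (s still evaluated)
     strncat (d, "", n)    ->  d                    (n still evaluated)
     strncat (d, "ab", 5)  ->  strcat (d, "ab")     (5 >= strlen ("ab"))  */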
10904 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10905 to the call.
10907 Return NULL_TREE if no simplification was possible, otherwise return the
10908 simplified form of the call as a tree.
10910 The simplified form may be a constant or other expression which
10911 computes the same value, but in a more efficient manner (including
10912 calls to other builtin functions).
10914 The call may contain arguments which need to be evaluated, but
10915 which are not useful to determine the result of the call. In
10916 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10917 COMPOUND_EXPR will be an argument which must be evaluated.
10918 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10919 COMPOUND_EXPR in the chain will contain the tree for the simplified
10920 form of the builtin function call. */
10922 static tree
10923 fold_builtin_strspn (tree s1, tree s2)
10925 if (!validate_arg (s1, POINTER_TYPE)
10926 || !validate_arg (s2, POINTER_TYPE))
10927 return NULL_TREE;
10928 else
10930 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10932 /* If both arguments are constants, evaluate at compile-time. */
10933 if (p1 && p2)
10935 const size_t r = strspn (p1, p2);
10936 return size_int (r);
10939 /* If either argument is "", return NULL_TREE. */
10940 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10941 /* Evaluate and ignore both arguments in case either one has
10942 side-effects. */
10943 return omit_two_operands (integer_type_node, integer_zero_node,
10944 s1, s2);
10945 return NULL_TREE;
10949 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10950 to the call.
10952 Return NULL_TREE if no simplification was possible, otherwise return the
10953 simplified form of the call as a tree.
10955 The simplified form may be a constant or other expression which
10956 computes the same value, but in a more efficient manner (including
10957 calls to other builtin functions).
10959 The call may contain arguments which need to be evaluated, but
10960 which are not useful to determine the result of the call. In
10961 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10962 COMPOUND_EXPR will be an argument which must be evaluated.
10963 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10964 COMPOUND_EXPR in the chain will contain the tree for the simplified
10965 form of the builtin function call. */
10967 static tree
10968 fold_builtin_strcspn (tree s1, tree s2)
10970 if (!validate_arg (s1, POINTER_TYPE)
10971 || !validate_arg (s2, POINTER_TYPE))
10972 return NULL_TREE;
10973 else
10975 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10977 /* If both arguments are constants, evaluate at compile-time. */
10978 if (p1 && p2)
10980 const size_t r = strcspn (p1, p2);
10981 return size_int (r);
10984 /* If the first argument is "", return NULL_TREE. */
10985 if (p1 && *p1 == '\0')
10987 /* Evaluate and ignore argument s2 in case it has
10988 side-effects. */
10989 return omit_one_operand (integer_type_node,
10990 integer_zero_node, s2);
10993 /* If the second argument is "", return __builtin_strlen(s1). */
10994 if (p2 && *p2 == '\0')
10996 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
10998 /* If the replacement _DECL isn't initialized, don't do the
10999 transformation. */
11000 if (!fn)
11001 return NULL_TREE;
11003 return build_call_expr (fn, 1, s1);
11005 return NULL_TREE;
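/* Illustrative sketch (hypothetical calls, not from this file):

     strspn ("aab", "a")   ->  2          strcspn ("aab", "b")  ->  2
     strspn (s, "")        ->  0          strcspn (s, "")       ->  strlen (s)
     strcspn ("", s)       ->  0

   with any dropped pointer arguments still evaluated for their side
   effects via omit_one_operand/omit_two_operands.  */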
11009 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11010 to the call. IGNORE is true if the value returned
11011 by the builtin will be ignored. UNLOCKED is true if this is
11012 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11013 the known length of the string. Return NULL_TREE if no simplification
11014 was possible. */
11016 tree
11017 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11019 /* If we're using an unlocked function, assume the other unlocked
11020 functions exist explicitly. */
11021 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11022 : implicit_built_in_decls[BUILT_IN_FPUTC];
11023 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11024 : implicit_built_in_decls[BUILT_IN_FWRITE];
11026 /* If the return value is used, don't do the transformation. */
11027 if (!ignore)
11028 return NULL_TREE;
11030 /* Verify the arguments in the original call. */
11031 if (!validate_arg (arg0, POINTER_TYPE)
11032 || !validate_arg (arg1, POINTER_TYPE))
11033 return NULL_TREE;
11035 if (! len)
11036 len = c_strlen (arg0, 0);
11038 /* Get the length of the string passed to fputs. If the length
11039 can't be determined, punt. */
11040 if (!len
11041 || TREE_CODE (len) != INTEGER_CST)
11042 return NULL_TREE;
11044 switch (compare_tree_int (len, 1))
11046 case -1: /* length is 0, delete the call entirely. */
11047 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11049 case 0: /* length is 1, call fputc. */
11051 const char *p = c_getstr (arg0);
11053 if (p != NULL)
11055 if (fn_fputc)
11056 return build_call_expr (fn_fputc, 2,
11057 build_int_cst (NULL_TREE, p[0]), arg1);
11058 else
11059 return NULL_TREE;
11062 /* FALLTHROUGH */
11063 case 1: /* length is greater than 1, call fwrite. */
11065 /* If optimizing for size, keep fputs. */
11066 if (optimize_size)
11067 return NULL_TREE;
11068 /* New argument list transforming fputs(string, stream) to
11069 fwrite(string, 1, len, stream). */
11070 if (fn_fwrite)
11071 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11072 else
11073 return NULL_TREE;
11075 default:
11076 gcc_unreachable ();
11078 return NULL_TREE;
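/* Illustrative sketch (hypothetical calls, not from this file), with the
   return value ignored:

     fputs ("", f)     ->  0                        (call removed)
     fputs ("x", f)    ->  fputc ('x', f)
     fputs ("abc", f)  ->  fwrite ("abc", 1, 3, f)  (skipped when optimizing for size)

   If the string length cannot be determined, the call is left alone.  */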
11081 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11082 produced, false otherwise. This is done so that we don't output the error
11083 or warning twice or three times. */
11084 bool
11085 fold_builtin_next_arg (tree exp, bool va_start_p)
11087 tree fntype = TREE_TYPE (current_function_decl);
11088 int nargs = call_expr_nargs (exp);
11089 tree arg;
11091 if (TYPE_ARG_TYPES (fntype) == 0
11092 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11093 == void_type_node))
11095 error ("%<va_start%> used in function with fixed args");
11096 return true;
11099 if (va_start_p)
11101 if (va_start_p && (nargs != 2))
11103 error ("wrong number of arguments to function %<va_start%>");
11104 return true;
11106 arg = CALL_EXPR_ARG (exp, 1);
11108 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11109 once we have checked the arguments and, if needed, issued a warning. */
11110 else
11112 if (nargs == 0)
11114 /* Evidently an out of date version of <stdarg.h>; can't validate
11115 va_start's second argument, but can still work as intended. */
11116 warning (0, "%<__builtin_next_arg%> called without an argument");
11117 return true;
11119 else if (nargs > 1)
11121 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11122 return true;
11124 arg = CALL_EXPR_ARG (exp, 0);
11127 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11128 or __builtin_next_arg (0) the first time we see it, after checking
11129 the arguments and if needed issuing a warning. */
11130 if (!integer_zerop (arg))
11132 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11134 /* Strip off all nops for the sake of the comparison. This
11135 is not quite the same as STRIP_NOPS. It does more.
11136 We must also strip off INDIRECT_EXPR for C++ reference
11137 parameters. */
11138 while (TREE_CODE (arg) == NOP_EXPR
11139 || TREE_CODE (arg) == CONVERT_EXPR
11140 || TREE_CODE (arg) == NON_LVALUE_EXPR
11141 || TREE_CODE (arg) == INDIRECT_REF)
11142 arg = TREE_OPERAND (arg, 0);
11143 if (arg != last_parm)
11145 /* FIXME: Sometimes the tree optimizers hand us something other
11146 than the last argument even though the user did use the last
11147 argument. We just warn and set the arg to be the last
11148 argument so that we will not get wrong code because of
11149 it. */
11150 warning (0, "second parameter of %<va_start%> not last named argument");
11152 /* We want to verify the second parameter just once before the tree
11153 optimizers are run and then avoid keeping it in the tree,
11154 as otherwise we could warn even for correct code like:
11155 void foo (int i, ...)
11156 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11157 if (va_start_p)
11158 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11159 else
11160 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11162 return false;
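/* Illustrative sketch (hypothetical examples, not part of the original source):
   code rejected or warned about by the va_start/next_arg checks above.  */
#if 0
#include <stdarg.h>

void
example_fixed_args (int a)            /* No "..."; the prototype is fixed.  */
{
  va_list ap;
  va_start (ap, a);   /* error: va_start used in function with fixed args */
  va_end (ap);
}

void
example_not_last (int a, int b, ...)
{
  va_list ap;
  va_start (ap, a);   /* warning: second parameter of va_start not last
                         named argument (should have been B).  */
  va_end (ap);
}
#endif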
11166 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11167 ORIG may be null if this is a 2-argument call. We don't attempt to
11168 simplify calls with more than 3 arguments.
11170 Return NULL_TREE if no simplification was possible, otherwise return the
11171 simplified form of the call as a tree. If IGNORED is true, it means that
11172 the caller does not use the returned value of the function. */
11174 static tree
11175 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11177 tree call, retval;
11178 const char *fmt_str = NULL;
11180 /* Verify the required arguments in the original call. We deal with two
11181 types of sprintf() calls: 'sprintf (str, fmt)' and
11182 'sprintf (dest, "%s", orig)'. */
11183 if (!validate_arg (dest, POINTER_TYPE)
11184 || !validate_arg (fmt, POINTER_TYPE))
11185 return NULL_TREE;
11186 if (orig && !validate_arg (orig, POINTER_TYPE))
11187 return NULL_TREE;
11189 /* Check whether the format is a literal string constant. */
11190 fmt_str = c_getstr (fmt);
11191 if (fmt_str == NULL)
11192 return NULL_TREE;
11194 call = NULL_TREE;
11195 retval = NULL_TREE;
11197 if (!init_target_chars ())
11198 return NULL_TREE;
11200 /* If the format doesn't contain % args or %%, use strcpy. */
11201 if (strchr (fmt_str, target_percent) == NULL)
11203 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11205 if (!fn)
11206 return NULL_TREE;
11208 /* Don't optimize sprintf (buf, "abc", ptr++). */
11209 if (orig)
11210 return NULL_TREE;
11212 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11213 'format' is known to contain no % formats. */
11214 call = build_call_expr (fn, 2, dest, fmt);
11215 if (!ignored)
11216 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11219 /* If the format is "%s", use strcpy if the result isn't used. */
11220 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11222 tree fn;
11223 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11225 if (!fn)
11226 return NULL_TREE;
11228 /* Don't crash on sprintf (str1, "%s"). */
11229 if (!orig)
11230 return NULL_TREE;
11232 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11233 if (!ignored)
11235 retval = c_strlen (orig, 1);
11236 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11237 return NULL_TREE;
11239 call = build_call_expr (fn, 2, dest, orig);
11242 if (call && retval)
11244 retval = fold_convert
11245 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11246 retval);
11247 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11249 else
11250 return call;
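/* Illustrative sketch (hypothetical example, not part of the original source):
   the function below shows the sprintf folds performed above.  */
#if 0
#include <stdio.h>

void
example_sprintf_folds (char *buf, const char *s)
{
  sprintf (buf, "abc");    /* No '%' in the format: becomes strcpy (buf, "abc");
                              a used return value becomes the constant 3.  */
  sprintf (buf, "%s", s);  /* Becomes strcpy (buf, s), provided the result is
                              unused or strlen (s) is known at compile time.  */
}
#endif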
11253 /* Expand a call EXP to __builtin_object_size. */
11255 static rtx
11256 expand_builtin_object_size (tree exp)
11258 tree ost;
11259 int object_size_type;
11260 tree fndecl = get_callee_fndecl (exp);
11261 location_t locus = EXPR_LOCATION (exp);
11263 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11265 error ("%Hfirst argument of %D must be a pointer, second integer constant",
11266 &locus, fndecl);
11267 expand_builtin_trap ();
11268 return const0_rtx;
11271 ost = CALL_EXPR_ARG (exp, 1);
11272 STRIP_NOPS (ost);
11274 if (TREE_CODE (ost) != INTEGER_CST
11275 || tree_int_cst_sgn (ost) < 0
11276 || compare_tree_int (ost, 3) > 0)
11278 error ("%Hlast argument of %D is not integer constant between 0 and 3",
11279 &locus, fndecl);
11280 expand_builtin_trap ();
11281 return const0_rtx;
11284 object_size_type = tree_low_cst (ost, 0);
11286 return object_size_type < 2 ? constm1_rtx : const0_rtx;
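/* Illustrative sketch (hypothetical example, not part of the original source):
   if a __builtin_object_size call survives to RTL expansion because the size
   could never be determined, the code above yields the documented "unknown"
   values.  */
#if 0
#include <stddef.h>

size_t
example_object_size_unknown (void *p)
{
  size_t max0 = __builtin_object_size (p, 0);  /* (size_t) -1 */
  size_t min2 = __builtin_object_size (p, 2);  /* 0 */
  return max0 - min2;
}
#endif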
11289 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11290 FCODE is the BUILT_IN_* to use.
11291 Return NULL_RTX if we failed; the caller should emit a normal call,
11292 otherwise try to get the result in TARGET, if convenient (and in
11293 mode MODE if that's convenient). */
11295 static rtx
11296 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11297 enum built_in_function fcode)
11299 tree dest, src, len, size;
11301 if (!validate_arglist (exp,
11302 POINTER_TYPE,
11303 fcode == BUILT_IN_MEMSET_CHK
11304 ? INTEGER_TYPE : POINTER_TYPE,
11305 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11306 return NULL_RTX;
11308 dest = CALL_EXPR_ARG (exp, 0);
11309 src = CALL_EXPR_ARG (exp, 1);
11310 len = CALL_EXPR_ARG (exp, 2);
11311 size = CALL_EXPR_ARG (exp, 3);
11313 if (! host_integerp (size, 1))
11314 return NULL_RTX;
11316 if (host_integerp (len, 1) || integer_all_onesp (size))
11318 tree fn;
11320 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11322 location_t locus = EXPR_LOCATION (exp);
11323 warning (0, "%Hcall to %D will always overflow destination buffer",
11324 &locus, get_callee_fndecl (exp));
11325 return NULL_RTX;
11328 fn = NULL_TREE;
11329 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11330 mem{cpy,pcpy,move,set} is available. */
11331 switch (fcode)
11333 case BUILT_IN_MEMCPY_CHK:
11334 fn = built_in_decls[BUILT_IN_MEMCPY];
11335 break;
11336 case BUILT_IN_MEMPCPY_CHK:
11337 fn = built_in_decls[BUILT_IN_MEMPCPY];
11338 break;
11339 case BUILT_IN_MEMMOVE_CHK:
11340 fn = built_in_decls[BUILT_IN_MEMMOVE];
11341 break;
11342 case BUILT_IN_MEMSET_CHK:
11343 fn = built_in_decls[BUILT_IN_MEMSET];
11344 break;
11345 default:
11346 break;
11349 if (! fn)
11350 return NULL_RTX;
11352 fn = build_call_expr (fn, 3, dest, src, len);
11353 if (TREE_CODE (fn) == CALL_EXPR)
11354 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11355 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11357 else if (fcode == BUILT_IN_MEMSET_CHK)
11358 return NULL_RTX;
11359 else
11361 unsigned int dest_align
11362 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11364 /* If DEST is not a pointer type, call the normal function. */
11365 if (dest_align == 0)
11366 return NULL_RTX;
11368 /* If SRC and DEST are the same (and not volatile), do nothing. */
11369 if (operand_equal_p (src, dest, 0))
11371 tree expr;
11373 if (fcode != BUILT_IN_MEMPCPY_CHK)
11375 /* Evaluate and ignore LEN in case it has side-effects. */
11376 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11377 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11380 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11381 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11384 /* __memmove_chk special case. */
11385 if (fcode == BUILT_IN_MEMMOVE_CHK)
11387 unsigned int src_align
11388 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11390 if (src_align == 0)
11391 return NULL_RTX;
11393 /* If src is categorized for a readonly section we can use
11394 normal __memcpy_chk. */
11395 if (readonly_data_expr (src))
11397 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11398 if (!fn)
11399 return NULL_RTX;
11400 fn = build_call_expr (fn, 4, dest, src, len, size);
11401 if (TREE_CODE (fn) == CALL_EXPR)
11402 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11403 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11406 return NULL_RTX;
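/* Illustrative sketch (hypothetical calls, not part of the original source):
   the effect of the __memcpy_chk expansion above.  */
#if 0
#include <string.h>

void
example_memcpy_chk_expand (const char *src)
{
  char buf[8];
  /* Constant length <= known object size: expands as a plain memcpy.  */
  __builtin___memcpy_chk (buf, src, 8, __builtin_object_size (buf, 0));
  /* Constant length > known object size: the compiler warns that the call
     "will always overflow destination buffer" and emits the normal checking
     call, which fails at run time.  */
  __builtin___memcpy_chk (buf, src, 16, __builtin_object_size (buf, 0));
}
#endif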
11410 /* Emit warning if a buffer overflow is detected at compile time. */
11412 static void
11413 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11415 int is_strlen = 0;
11416 tree len, size;
11417 location_t locus;
11419 switch (fcode)
11421 case BUILT_IN_STRCPY_CHK:
11422 case BUILT_IN_STPCPY_CHK:
11423 /* For __strcat_chk the warning will be emitted only if overflowing
11424 by at least strlen (dest) + 1 bytes. */
11425 case BUILT_IN_STRCAT_CHK:
11426 len = CALL_EXPR_ARG (exp, 1);
11427 size = CALL_EXPR_ARG (exp, 2);
11428 is_strlen = 1;
11429 break;
11430 case BUILT_IN_STRNCAT_CHK:
11431 case BUILT_IN_STRNCPY_CHK:
11432 len = CALL_EXPR_ARG (exp, 2);
11433 size = CALL_EXPR_ARG (exp, 3);
11434 break;
11435 case BUILT_IN_SNPRINTF_CHK:
11436 case BUILT_IN_VSNPRINTF_CHK:
11437 len = CALL_EXPR_ARG (exp, 1);
11438 size = CALL_EXPR_ARG (exp, 3);
11439 break;
11440 default:
11441 gcc_unreachable ();
11444 if (!len || !size)
11445 return;
11447 if (! host_integerp (size, 1) || integer_all_onesp (size))
11448 return;
11450 if (is_strlen)
11452 len = c_strlen (len, 1);
11453 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11454 return;
11456 else if (fcode == BUILT_IN_STRNCAT_CHK)
11458 tree src = CALL_EXPR_ARG (exp, 1);
11459 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11460 return;
11461 src = c_strlen (src, 1);
11462 if (! src || ! host_integerp (src, 1))
11464 locus = EXPR_LOCATION (exp);
11465 warning (0, "%Hcall to %D might overflow destination buffer",
11466 &locus, get_callee_fndecl (exp));
11467 return;
11469 else if (tree_int_cst_lt (src, size))
11470 return;
11472 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11473 return;
11475 locus = EXPR_LOCATION (exp);
11476 warning (0, "%Hcall to %D will always overflow destination buffer",
11477 &locus, get_callee_fndecl (exp));
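/* Illustrative sketch (hypothetical call, not part of the original source):
   code that triggers the compile-time warning above.  Fortified headers
   (e.g. glibc with -D_FORTIFY_SOURCE) generate such _chk calls from plain
   strcpy.  */
#if 0
void
example_chk_warning (void)
{
  char buf[4];
  /* Source length 8 >= destination size 4: warning, "call to
     __builtin___strcpy_chk will always overflow destination buffer".  */
  __builtin___strcpy_chk (buf, "too long", __builtin_object_size (buf, 0));
}
#endif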
11480 /* Emit warning if a buffer overflow is detected at compile time
11481 in __sprintf_chk/__vsprintf_chk calls. */
11483 static void
11484 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11486 tree dest, size, len, fmt, flag;
11487 const char *fmt_str;
11488 int nargs = call_expr_nargs (exp);
11490 /* Verify the required arguments in the original call. */
11492 if (nargs < 4)
11493 return;
11494 dest = CALL_EXPR_ARG (exp, 0);
11495 flag = CALL_EXPR_ARG (exp, 1);
11496 size = CALL_EXPR_ARG (exp, 2);
11497 fmt = CALL_EXPR_ARG (exp, 3);
11499 if (! host_integerp (size, 1) || integer_all_onesp (size))
11500 return;
11502 /* Check whether the format is a literal string constant. */
11503 fmt_str = c_getstr (fmt);
11504 if (fmt_str == NULL)
11505 return;
11507 if (!init_target_chars ())
11508 return;
11510 /* If the format doesn't contain % args or %%, we know its size. */
11511 if (strchr (fmt_str, target_percent) == 0)
11512 len = build_int_cstu (size_type_node, strlen (fmt_str));
11513 /* If the format is "%s" and first ... argument is a string literal,
11514 we know the length too. */
11515 else if (fcode == BUILT_IN_SPRINTF_CHK
11516 && strcmp (fmt_str, target_percent_s) == 0)
11518 tree arg;
11520 if (nargs < 5)
11521 return;
11522 arg = CALL_EXPR_ARG (exp, 4);
11523 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11524 return;
11526 len = c_strlen (arg, 1);
11527 if (!len || ! host_integerp (len, 1))
11528 return;
11530 else
11531 return;
11533 if (! tree_int_cst_lt (len, size))
11535 location_t locus = EXPR_LOCATION (exp);
11536 warning (0, "%Hcall to %D will always overflow destination buffer",
11537 &locus, get_callee_fndecl (exp));
11541 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11542 if possible. */
11544 tree
11545 fold_builtin_object_size (tree ptr, tree ost)
11547 tree ret = NULL_TREE;
11548 int object_size_type;
11550 if (!validate_arg (ptr, POINTER_TYPE)
11551 || !validate_arg (ost, INTEGER_TYPE))
11552 return NULL_TREE;
11554 STRIP_NOPS (ost);
11556 if (TREE_CODE (ost) != INTEGER_CST
11557 || tree_int_cst_sgn (ost) < 0
11558 || compare_tree_int (ost, 3) > 0)
11559 return NULL_TREE;
11561 object_size_type = tree_low_cst (ost, 0);
11563 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11564 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11565 and (size_t) 0 for types 2 and 3. */
11566 if (TREE_SIDE_EFFECTS (ptr))
11567 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11569 if (TREE_CODE (ptr) == ADDR_EXPR)
11570 ret = build_int_cstu (size_type_node,
11571 compute_builtin_object_size (ptr, object_size_type));
11573 else if (TREE_CODE (ptr) == SSA_NAME)
11575 unsigned HOST_WIDE_INT bytes;
11577 /* If object size is not known yet, delay folding until
11578 later. Maybe subsequent passes will help determine
11579 it. */
11580 bytes = compute_builtin_object_size (ptr, object_size_type);
11581 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11582 ? -1 : 0))
11583 ret = build_int_cstu (size_type_node, bytes);
11586 if (ret)
11588 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11589 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11590 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11591 ret = NULL_TREE;
11594 return ret;
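/* Illustrative sketch (hypothetical example, not part of the original source):
   compile-time folds performed above when the pointed-to object is known.  */
#if 0
#include <stddef.h>

size_t
example_object_size_folds (void)
{
  char buf[16];
  struct { char a[8]; char b[8]; } s;
  size_t whole = __builtin_object_size (buf, 0);  /* folded to 16 */
  size_t sub = __builtin_object_size (s.a, 1);    /* type 1 stops at the
                                                     enclosing member: 8 */
  return whole + sub;
}
#endif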
11597 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11598 DEST, SRC, LEN, and SIZE are the arguments to the call.
11599 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11600 code of the builtin. If MAXLEN is not NULL, it is maximum length
11601 passed as third argument. */
11603 tree
11604 fold_builtin_memory_chk (tree fndecl,
11605 tree dest, tree src, tree len, tree size,
11606 tree maxlen, bool ignore,
11607 enum built_in_function fcode)
11609 tree fn;
11611 if (!validate_arg (dest, POINTER_TYPE)
11612 || !validate_arg (src,
11613 (fcode == BUILT_IN_MEMSET_CHK
11614 ? INTEGER_TYPE : POINTER_TYPE))
11615 || !validate_arg (len, INTEGER_TYPE)
11616 || !validate_arg (size, INTEGER_TYPE))
11617 return NULL_TREE;
11619 /* If SRC and DEST are the same (and not volatile), return DEST
11620 (resp. DEST+LEN for __mempcpy_chk). */
11621 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11623 if (fcode != BUILT_IN_MEMPCPY_CHK)
11624 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11625 else
11627 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11628 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11632 if (! host_integerp (size, 1))
11633 return NULL_TREE;
11635 if (! integer_all_onesp (size))
11637 if (! host_integerp (len, 1))
11639 /* If LEN is not constant, try MAXLEN too.
11640 For MAXLEN only allow optimizing into non-_ocs function
11641 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11642 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11644 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11646 /* (void) __mempcpy_chk () can be optimized into
11647 (void) __memcpy_chk (). */
11648 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11649 if (!fn)
11650 return NULL_TREE;
11652 return build_call_expr (fn, 4, dest, src, len, size);
11654 return NULL_TREE;
11657 else
11658 maxlen = len;
11660 if (tree_int_cst_lt (size, maxlen))
11661 return NULL_TREE;
11664 fn = NULL_TREE;
11665 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11666 mem{cpy,pcpy,move,set} is available. */
11667 switch (fcode)
11669 case BUILT_IN_MEMCPY_CHK:
11670 fn = built_in_decls[BUILT_IN_MEMCPY];
11671 break;
11672 case BUILT_IN_MEMPCPY_CHK:
11673 fn = built_in_decls[BUILT_IN_MEMPCPY];
11674 break;
11675 case BUILT_IN_MEMMOVE_CHK:
11676 fn = built_in_decls[BUILT_IN_MEMMOVE];
11677 break;
11678 case BUILT_IN_MEMSET_CHK:
11679 fn = built_in_decls[BUILT_IN_MEMSET];
11680 break;
11681 default:
11682 break;
11685 if (!fn)
11686 return NULL_TREE;
11688 return build_call_expr (fn, 3, dest, src, len);
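/* Illustrative sketch (hypothetical calls, not part of the original source):
   the __mem*_chk folds performed above.  */
#if 0
#include <string.h>

void
example_memory_chk_folds (char *dst, const char *src, size_t n)
{
  /* SRC == DEST (and not __mempcpy_chk): reduces to DEST, with N still
     evaluated for side effects.  */
  __builtin___memmove_chk (dst, dst, n, (size_t) -1);
  /* Unknown object size ((size_t) -1): folds to the plain function.  */
  __builtin___memcpy_chk (dst, src, n, (size_t) -1);
  /* Result unused, length not constant, size known: __mempcpy_chk is
     folded to __memcpy_chk.  */
  (void) __builtin___mempcpy_chk (dst, src, n, 32);
}
#endif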
11691 /* Fold a call to the __st[rp]cpy_chk builtin.
11692 DEST, SRC, and SIZE are the arguments to the call.
11693 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11694 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11695 strings passed as second argument. */
11697 tree
11698 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
11699 tree maxlen, bool ignore,
11700 enum built_in_function fcode)
11702 tree len, fn;
11704 if (!validate_arg (dest, POINTER_TYPE)
11705 || !validate_arg (src, POINTER_TYPE)
11706 || !validate_arg (size, INTEGER_TYPE))
11707 return NULL_TREE;
11709 /* If SRC and DEST are the same (and not volatile), return DEST. */
11710 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
11711 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
11713 if (! host_integerp (size, 1))
11714 return NULL_TREE;
11716 if (! integer_all_onesp (size))
11718 len = c_strlen (src, 1);
11719 if (! len || ! host_integerp (len, 1))
11721 /* If LEN is not constant, try MAXLEN too.
11722 For MAXLEN only allow optimizing into non-_ocs function
11723 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11724 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11726 if (fcode == BUILT_IN_STPCPY_CHK)
11728 if (! ignore)
11729 return NULL_TREE;
11731 /* If return value of __stpcpy_chk is ignored,
11732 optimize into __strcpy_chk. */
11733 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
11734 if (!fn)
11735 return NULL_TREE;
11737 return build_call_expr (fn, 3, dest, src, size);
11740 if (! len || TREE_SIDE_EFFECTS (len))
11741 return NULL_TREE;
11743 /* If c_strlen returned something, but not a constant,
11744 transform __strcpy_chk into __memcpy_chk. */
11745 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11746 if (!fn)
11747 return NULL_TREE;
11749 len = size_binop (PLUS_EXPR, len, ssize_int (1));
11750 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
11751 build_call_expr (fn, 4,
11752 dest, src, len, size));
11755 else
11756 maxlen = len;
11758 if (! tree_int_cst_lt (maxlen, size))
11759 return NULL_TREE;
11762 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
11763 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
11764 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
11765 if (!fn)
11766 return NULL_TREE;
11768 return build_call_expr (fn, 2, dest, src);
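/* Illustrative sketch (hypothetical calls, not part of the original source):
   the __st[rp]cpy_chk folds performed above.  */
#if 0
#include <string.h>

void
example_stxcpy_chk_folds (char *dst, const char *src)
{
  char buf[8];
  /* Unknown object size ((size_t) -1): folds to plain strcpy.  */
  __builtin___strcpy_chk (dst, src, (size_t) -1);
  /* Constant source known to fit the 8-byte destination: no overflow is
     possible, so this also folds to strcpy.  */
  __builtin___strcpy_chk (buf, "hi", __builtin_object_size (buf, 0));
}
#endif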
11771 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11772 are the arguments to the call. If MAXLEN is not NULL, it is maximum
11773 length passed as third argument. */
11775 tree
11776 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
11777 tree maxlen)
11779 tree fn;
11781 if (!validate_arg (dest, POINTER_TYPE)
11782 || !validate_arg (src, POINTER_TYPE)
11783 || !validate_arg (len, INTEGER_TYPE)
11784 || !validate_arg (size, INTEGER_TYPE))
11785 return NULL_TREE;
11787 if (! host_integerp (size, 1))
11788 return NULL_TREE;
11790 if (! integer_all_onesp (size))
11792 if (! host_integerp (len, 1))
11794 /* If LEN is not constant, try MAXLEN too.
11795 For MAXLEN only allow optimizing into non-_ocs function
11796 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11797 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11798 return NULL_TREE;
11800 else
11801 maxlen = len;
11803 if (tree_int_cst_lt (size, maxlen))
11804 return NULL_TREE;
11807 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11808 fn = built_in_decls[BUILT_IN_STRNCPY];
11809 if (!fn)
11810 return NULL_TREE;
11812 return build_call_expr (fn, 3, dest, src, len);
11815 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11816 are the arguments to the call. */
11818 static tree
11819 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
11821 tree fn;
11822 const char *p;
11824 if (!validate_arg (dest, POINTER_TYPE)
11825 || !validate_arg (src, POINTER_TYPE)
11826 || !validate_arg (size, INTEGER_TYPE))
11827 return NULL_TREE;
11829 p = c_getstr (src);
11830 /* If the SRC parameter is "", return DEST. */
11831 if (p && *p == '\0')
11832 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11834 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
11835 return NULL_TREE;
11837 /* If __builtin_strcat_chk is used, assume strcat is available. */
11838 fn = built_in_decls[BUILT_IN_STRCAT];
11839 if (!fn)
11840 return NULL_TREE;
11842 return build_call_expr (fn, 2, dest, src);
11845 /* Fold a call to the __strncat_chk builtin FNDECL with arguments DEST, SRC,
11846 LEN, and SIZE. */
11848 static tree
11849 fold_builtin_strncat_chk (tree fndecl,
11850 tree dest, tree src, tree len, tree size)
11852 tree fn;
11853 const char *p;
11855 if (!validate_arg (dest, POINTER_TYPE)
11856 || !validate_arg (src, POINTER_TYPE)
11857 || !validate_arg (len, INTEGER_TYPE)
11858 || !validate_arg (size, INTEGER_TYPE))
11859 return NULL_TREE;
11861 p = c_getstr (src);
11862 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11863 if (p && *p == '\0')
11864 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11865 else if (integer_zerop (len))
11866 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11868 if (! host_integerp (size, 1))
11869 return NULL_TREE;
11871 if (! integer_all_onesp (size))
11873 tree src_len = c_strlen (src, 1);
11874 if (src_len
11875 && host_integerp (src_len, 1)
11876 && host_integerp (len, 1)
11877 && ! tree_int_cst_lt (len, src_len))
11879 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
11880 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
11881 if (!fn)
11882 return NULL_TREE;
11884 return build_call_expr (fn, 3, dest, src, size);
11886 return NULL_TREE;
11889 /* If __builtin_strncat_chk is used, assume strncat is available. */
11890 fn = built_in_decls[BUILT_IN_STRNCAT];
11891 if (!fn)
11892 return NULL_TREE;
11894 return build_call_expr (fn, 3, dest, src, len);
11897 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
11898 a normal call should be emitted rather than expanding the function
11899 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
11901 static tree
11902 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
11904 tree dest, size, len, fn, fmt, flag;
11905 const char *fmt_str;
11906 int nargs = call_expr_nargs (exp);
11908 /* Verify the required arguments in the original call. */
11909 if (nargs < 4)
11910 return NULL_TREE;
11911 dest = CALL_EXPR_ARG (exp, 0);
11912 if (!validate_arg (dest, POINTER_TYPE))
11913 return NULL_TREE;
11914 flag = CALL_EXPR_ARG (exp, 1);
11915 if (!validate_arg (flag, INTEGER_TYPE))
11916 return NULL_TREE;
11917 size = CALL_EXPR_ARG (exp, 2);
11918 if (!validate_arg (size, INTEGER_TYPE))
11919 return NULL_TREE;
11920 fmt = CALL_EXPR_ARG (exp, 3);
11921 if (!validate_arg (fmt, POINTER_TYPE))
11922 return NULL_TREE;
11924 if (! host_integerp (size, 1))
11925 return NULL_TREE;
11927 len = NULL_TREE;
11929 if (!init_target_chars ())
11930 return NULL_TREE;
11932 /* Check whether the format is a literal string constant. */
11933 fmt_str = c_getstr (fmt);
11934 if (fmt_str != NULL)
11936 /* If the format doesn't contain % args or %%, we know the size. */
11937 if (strchr (fmt_str, target_percent) == 0)
11939 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
11940 len = build_int_cstu (size_type_node, strlen (fmt_str));
11942 /* If the format is "%s" and first ... argument is a string literal,
11943 we know the size too. */
11944 else if (fcode == BUILT_IN_SPRINTF_CHK
11945 && strcmp (fmt_str, target_percent_s) == 0)
11947 tree arg;
11949 if (nargs == 5)
11951 arg = CALL_EXPR_ARG (exp, 4);
11952 if (validate_arg (arg, POINTER_TYPE))
11954 len = c_strlen (arg, 1);
11955 if (! len || ! host_integerp (len, 1))
11956 len = NULL_TREE;
11962 if (! integer_all_onesp (size))
11964 if (! len || ! tree_int_cst_lt (len, size))
11965 return NULL_TREE;
11968 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
11969 or if format doesn't contain % chars or is "%s". */
11970 if (! integer_zerop (flag))
11972 if (fmt_str == NULL)
11973 return NULL_TREE;
11974 if (strchr (fmt_str, target_percent) != NULL
11975 && strcmp (fmt_str, target_percent_s))
11976 return NULL_TREE;
11979 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
11980 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
11981 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
11982 if (!fn)
11983 return NULL_TREE;
11985 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
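/* Illustrative sketch (hypothetical calls, not part of the original source):
   the __sprintf_chk folds performed above.  */
#if 0
#include <stdio.h>

void
example_sprintf_chk_folds (char *dst, const char *s)
{
  char buf[16];
  /* Format without '%' and known to fit (5 < 16): folds to
     sprintf (buf, "hello"), which may in turn fold to strcpy.  */
  __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0), "hello");
  /* flag == 0, format "%s", unknown object size: folds to
     sprintf (dst, "%s", s).  */
  __builtin___sprintf_chk (dst, 0, (size_t) -1, "%s", s);
}
#endif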
11988 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
11989 a normal call should be emitted rather than expanding the function
11990 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
11991 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
11992 passed as second argument. */
11994 tree
11995 fold_builtin_snprintf_chk (tree exp, tree maxlen,
11996 enum built_in_function fcode)
11998 tree dest, size, len, fn, fmt, flag;
11999 const char *fmt_str;
12001 /* Verify the required arguments in the original call. */
12002 if (call_expr_nargs (exp) < 5)
12003 return NULL_TREE;
12004 dest = CALL_EXPR_ARG (exp, 0);
12005 if (!validate_arg (dest, POINTER_TYPE))
12006 return NULL_TREE;
12007 len = CALL_EXPR_ARG (exp, 1);
12008 if (!validate_arg (len, INTEGER_TYPE))
12009 return NULL_TREE;
12010 flag = CALL_EXPR_ARG (exp, 2);
12011 if (!validate_arg (flag, INTEGER_TYPE))
12012 return NULL_TREE;
12013 size = CALL_EXPR_ARG (exp, 3);
12014 if (!validate_arg (size, INTEGER_TYPE))
12015 return NULL_TREE;
12016 fmt = CALL_EXPR_ARG (exp, 4);
12017 if (!validate_arg (fmt, POINTER_TYPE))
12018 return NULL_TREE;
12020 if (! host_integerp (size, 1))
12021 return NULL_TREE;
12023 if (! integer_all_onesp (size))
12025 if (! host_integerp (len, 1))
12027 /* If LEN is not constant, try MAXLEN too.
12028 For MAXLEN only allow optimizing into non-_ocs function
12029 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12030 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12031 return NULL_TREE;
12033 else
12034 maxlen = len;
12036 if (tree_int_cst_lt (size, maxlen))
12037 return NULL_TREE;
12040 if (!init_target_chars ())
12041 return NULL_TREE;
12043 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12044 or if format doesn't contain % chars or is "%s". */
12045 if (! integer_zerop (flag))
12047 fmt_str = c_getstr (fmt);
12048 if (fmt_str == NULL)
12049 return NULL_TREE;
12050 if (strchr (fmt_str, target_percent) != NULL
12051 && strcmp (fmt_str, target_percent_s))
12052 return NULL_TREE;
12055 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12056 available. */
12057 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12058 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12059 if (!fn)
12060 return NULL_TREE;
12062 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12065 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12066 FMT and ARG are the arguments to the call; we don't fold cases with
12067 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12069 Return NULL_TREE if no simplification was possible, otherwise return the
12070 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12071 code of the function to be simplified. */
12073 static tree
12074 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12075 enum built_in_function fcode)
12077 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12078 const char *fmt_str = NULL;
12080 /* If the return value is used, don't do the transformation. */
12081 if (! ignore)
12082 return NULL_TREE;
12084 /* Verify the required arguments in the original call. */
12085 if (!validate_arg (fmt, POINTER_TYPE))
12086 return NULL_TREE;
12088 /* Check whether the format is a literal string constant. */
12089 fmt_str = c_getstr (fmt);
12090 if (fmt_str == NULL)
12091 return NULL_TREE;
12093 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12095 /* If we're using an unlocked function, assume the other
12096 unlocked functions exist explicitly. */
12097 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12098 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12100 else
12102 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12103 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12106 if (!init_target_chars ())
12107 return NULL_TREE;
12109 if (strcmp (fmt_str, target_percent_s) == 0
12110 || strchr (fmt_str, target_percent) == NULL)
12112 const char *str;
12114 if (strcmp (fmt_str, target_percent_s) == 0)
12116 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12117 return NULL_TREE;
12119 if (!arg || !validate_arg (arg, POINTER_TYPE))
12120 return NULL_TREE;
12122 str = c_getstr (arg);
12123 if (str == NULL)
12124 return NULL_TREE;
12126 else
12128 /* The format specifier doesn't contain any '%' characters. */
12129 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12130 && arg)
12131 return NULL_TREE;
12132 str = fmt_str;
12135 /* If the string was "", printf does nothing. */
12136 if (str[0] == '\0')
12137 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12139 /* If the string has length of 1, call putchar. */
12140 if (str[1] == '\0')
12142 /* Given printf ("c"), where c is any single character,
12143 convert "c"[0] to an int and pass that to the replacement
12144 function. */
12145 newarg = build_int_cst (NULL_TREE, str[0]);
12146 if (fn_putchar)
12147 call = build_call_expr (fn_putchar, 1, newarg);
12149 else
12151 /* If the string was "string\n", call puts("string"). */
12152 size_t len = strlen (str);
12153 if ((unsigned char)str[len - 1] == target_newline)
12155 /* Create a NUL-terminated string that's one char shorter
12156 than the original, stripping off the trailing '\n'. */
12157 char *newstr = alloca (len);
12158 memcpy (newstr, str, len - 1);
12159 newstr[len - 1] = 0;
12161 newarg = build_string_literal (len, newstr);
12162 if (fn_puts)
12163 call = build_call_expr (fn_puts, 1, newarg);
12165 else
12166 /* We'd like to arrange to call fputs(string,stdout) here,
12167 but we need stdout and don't have a way to get it yet. */
12168 return NULL_TREE;
12172 /* The other optimizations can be done only on the non-va_list variants. */
12173 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12174 return NULL_TREE;
12176 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12177 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12179 if (!arg || !validate_arg (arg, POINTER_TYPE))
12180 return NULL_TREE;
12181 if (fn_puts)
12182 call = build_call_expr (fn_puts, 1, arg);
12185 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12186 else if (strcmp (fmt_str, target_percent_c) == 0)
12188 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12189 return NULL_TREE;
12190 if (fn_putchar)
12191 call = build_call_expr (fn_putchar, 1, arg);
12194 if (!call)
12195 return NULL_TREE;
12197 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
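/* Illustrative sketch (hypothetical example, not part of the original source):
   the function below shows the printf folds above; they apply only when the
   return value is unused.  */
#if 0
#include <stdio.h>

void
example_printf_folds (const char *s, int c)
{
  printf ("");         /* Empty string: the call is removed.  */
  printf ("x");        /* One character: becomes putchar ('x').  */
  printf ("hello\n");  /* "string\n": becomes puts ("hello").  */
  printf ("%s\n", s);  /* "%s\n": becomes puts (s).  */
  printf ("%c", c);    /* "%c": becomes putchar (c).  */
}
#endif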
12200 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12201 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12202 more than 3 arguments, and ARG may be null in the 2-argument case.
12204 Return NULL_TREE if no simplification was possible, otherwise return the
12205 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12206 code of the function to be simplified. */
12208 static tree
12209 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12210 enum built_in_function fcode)
12212 tree fn_fputc, fn_fputs, call = NULL_TREE;
12213 const char *fmt_str = NULL;
12215 /* If the return value is used, don't do the transformation. */
12216 if (! ignore)
12217 return NULL_TREE;
12219 /* Verify the required arguments in the original call. */
12220 if (!validate_arg (fp, POINTER_TYPE))
12221 return NULL_TREE;
12222 if (!validate_arg (fmt, POINTER_TYPE))
12223 return NULL_TREE;
12225 /* Check whether the format is a literal string constant. */
12226 fmt_str = c_getstr (fmt);
12227 if (fmt_str == NULL)
12228 return NULL_TREE;
12230 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12232 /* If we're using an unlocked function, assume the other
12233 unlocked functions exist explicitly. */
12234 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12235 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12237 else
12239 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12240 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12243 if (!init_target_chars ())
12244 return NULL_TREE;
12246 /* If the format doesn't contain % args or %%, use fputs. */
12247 if (strchr (fmt_str, target_percent) == NULL)
12249 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12250 && arg)
12251 return NULL_TREE;
12253 /* If the format specifier was "", fprintf does nothing. */
12254 if (fmt_str[0] == '\0')
12256 /* If FP has side-effects, just wait until gimplification is
12257 done. */
12258 if (TREE_SIDE_EFFECTS (fp))
12259 return NULL_TREE;
12261 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12264 /* When "string" doesn't contain %, replace all cases of
12265 fprintf (fp, string) with fputs (string, fp). The fputs
12266 builtin will take care of special cases like length == 1. */
12267 if (fn_fputs)
12268 call = build_call_expr (fn_fputs, 2, fmt, fp);
12271 /* The other optimizations can be done only on the non-va_list variants. */
12272 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12273 return NULL_TREE;
12275 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12276 else if (strcmp (fmt_str, target_percent_s) == 0)
12278 if (!arg || !validate_arg (arg, POINTER_TYPE))
12279 return NULL_TREE;
12280 if (fn_fputs)
12281 call = build_call_expr (fn_fputs, 2, arg, fp);
12284 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12285 else if (strcmp (fmt_str, target_percent_c) == 0)
12287 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12288 return NULL_TREE;
12289 if (fn_fputc)
12290 call = build_call_expr (fn_fputc, 2, arg, fp);
12293 if (!call)
12294 return NULL_TREE;
12295 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
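/* Illustrative sketch (hypothetical example, not part of the original source):
   the function below shows the fprintf folds above (return value unused).  */
#if 0
#include <stdio.h>

void
example_fprintf_folds (FILE *fp, const char *s, int c)
{
  fprintf (fp, "hello");  /* No '%': becomes fputs ("hello", fp).  */
  fprintf (fp, "%s", s);  /* "%s": becomes fputs (s, fp).  */
  fprintf (fp, "%c", c);  /* "%c": becomes fputc (c, fp).  */
}
#endif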
12298 /* Initialize format string characters in the target charset. */
12300 static bool
12301 init_target_chars (void)
12303 static bool init;
12304 if (!init)
12306 target_newline = lang_hooks.to_target_charset ('\n');
12307 target_percent = lang_hooks.to_target_charset ('%');
12308 target_c = lang_hooks.to_target_charset ('c');
12309 target_s = lang_hooks.to_target_charset ('s');
12310 if (target_newline == 0 || target_percent == 0 || target_c == 0
12311 || target_s == 0)
12312 return false;
12314 target_percent_c[0] = target_percent;
12315 target_percent_c[1] = target_c;
12316 target_percent_c[2] = '\0';
12318 target_percent_s[0] = target_percent;
12319 target_percent_s[1] = target_s;
12320 target_percent_s[2] = '\0';
12322 target_percent_s_newline[0] = target_percent;
12323 target_percent_s_newline[1] = target_s;
12324 target_percent_s_newline[2] = target_newline;
12325 target_percent_s_newline[3] = '\0';
12327 init = true;
12329 return true;
12332 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12333 and no overflow/underflow occurred. INEXACT is true if M was not
12334 exactly calculated. TYPE is the tree type for the result. This
12335 function assumes that you cleared the MPFR flags and then
12336 calculated M to see if anything subsequently set a flag prior to
12337 entering this function. Return NULL_TREE if any checks fail. */
12339 static tree
12340 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12342 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12343 overflow/underflow occurred. If -frounding-math, proceed iff the
12344 result of calling FUNC was exact. */
12345 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12346 && (!flag_rounding_math || !inexact))
12348 REAL_VALUE_TYPE rr;
12350 real_from_mpfr (&rr, m, type, GMP_RNDN);
12351 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12352 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12353 but the mpfr_t is not, then we underflowed in the
12354 conversion. */
12355 if (real_isfinite (&rr)
12356 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12358 REAL_VALUE_TYPE rmode;
12360 real_convert (&rmode, TYPE_MODE (type), &rr);
12361 /* Proceed iff the specified mode can hold the value. */
12362 if (real_identical (&rmode, &rr))
12363 return build_real (type, rmode);
12366 return NULL_TREE;
12369 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12370 FUNC on it and return the resulting value as a tree with type TYPE.
12371 If MIN and/or MAX are not NULL, then the supplied ARG must be
12372 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12373 acceptable values, otherwise they are not. The mpfr precision is
12374 set to the precision of TYPE. We assume that function FUNC returns
12375 zero if the result could be calculated exactly within the requested
12376 precision. */
12378 static tree
12379 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12380 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12381 bool inclusive)
12383 tree result = NULL_TREE;
12385 STRIP_NOPS (arg);
12387 /* To proceed, MPFR must exactly represent the target floating point
12388 format, which only happens when the target base equals two. */
12389 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12390 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12392 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12394 if (real_isfinite (ra)
12395 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12396 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12398 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12399 int inexact;
12400 mpfr_t m;
12402 mpfr_init2 (m, prec);
12403 mpfr_from_real (m, ra, GMP_RNDN);
12404 mpfr_clear_flags ();
12405 inexact = func (m, m, GMP_RNDN);
12406 result = do_mpfr_ckconv (m, type, inexact);
12407 mpfr_clear (m);
12411 return result;
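/* Illustrative sketch (hypothetical example, not part of the original source):
   with GCC built against MPFR and a base-2 target floating point format, the
   helper above lets calls such as the one below be folded to a correctly
   rounded REAL_CST at compile time.  */
#if 0
#include <math.h>

double
example_mpfr_fold (void)
{
  return sin (0.5);   /* Constant argument: evaluated via mpfr_sin.  */
}
#endif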
12414 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12415 FUNC on it and return the resulting value as a tree with type TYPE.
12416 The mpfr precision is set to the precision of TYPE. We assume that
12417 function FUNC returns zero if the result could be calculated
12418 exactly within the requested precision. */
12420 static tree
12421 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12422 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12424 tree result = NULL_TREE;
12426 STRIP_NOPS (arg1);
12427 STRIP_NOPS (arg2);
12429 /* To proceed, MPFR must exactly represent the target floating point
12430 format, which only happens when the target base equals two. */
12431 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12432 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12433 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12435 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12436 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12438 if (real_isfinite (ra1) && real_isfinite (ra2))
12440 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12441 int inexact;
12442 mpfr_t m1, m2;
12444 mpfr_inits2 (prec, m1, m2, NULL);
12445 mpfr_from_real (m1, ra1, GMP_RNDN);
12446 mpfr_from_real (m2, ra2, GMP_RNDN);
12447 mpfr_clear_flags ();
12448 inexact = func (m1, m1, m2, GMP_RNDN);
12449 result = do_mpfr_ckconv (m1, type, inexact);
12450 mpfr_clears (m1, m2, NULL);
12454 return result;
12457 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12458 FUNC on it and return the resulting value as a tree with type TYPE.
12459 The mpfr precision is set to the precision of TYPE. We assume that
12460 function FUNC returns zero if the result could be calculated
12461 exactly within the requested precision. */
12463 static tree
12464 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12465 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12467 tree result = NULL_TREE;
12469 STRIP_NOPS (arg1);
12470 STRIP_NOPS (arg2);
12471 STRIP_NOPS (arg3);
12473 /* To proceed, MPFR must exactly represent the target floating point
12474 format, which only happens when the target base equals two. */
12475 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12476 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12477 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12478 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12480 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12481 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12482 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12484 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12486 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12487 int inexact;
12488 mpfr_t m1, m2, m3;
12490 mpfr_inits2 (prec, m1, m2, m3, NULL);
12491 mpfr_from_real (m1, ra1, GMP_RNDN);
12492 mpfr_from_real (m2, ra2, GMP_RNDN);
12493 mpfr_from_real (m3, ra3, GMP_RNDN);
12494 mpfr_clear_flags ();
12495 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12496 result = do_mpfr_ckconv (m1, type, inexact);
12497 mpfr_clears (m1, m2, m3, NULL);
12501 return result;
12504 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12505 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12506 If ARG_SINP and ARG_COSP are NULL then the result is returned
12507 as a complex value.
12508 The type is taken from the type of ARG and is used for setting the
12509 precision of the calculation and results. */
12511 static tree
12512 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12514 tree const type = TREE_TYPE (arg);
12515 tree result = NULL_TREE;
12517 STRIP_NOPS (arg);
12519 /* To proceed, MPFR must exactly represent the target floating point
12520 format, which only happens when the target base equals two. */
12521 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12522 && TREE_CODE (arg) == REAL_CST
12523 && !TREE_OVERFLOW (arg))
12525 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12527 if (real_isfinite (ra))
12529 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12530 tree result_s, result_c;
12531 int inexact;
12532 mpfr_t m, ms, mc;
12534 mpfr_inits2 (prec, m, ms, mc, NULL);
12535 mpfr_from_real (m, ra, GMP_RNDN);
12536 mpfr_clear_flags ();
12537 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12538 result_s = do_mpfr_ckconv (ms, type, inexact);
12539 result_c = do_mpfr_ckconv (mc, type, inexact);
12540 mpfr_clears (m, ms, mc, NULL);
12541 if (result_s && result_c)
12543 /* If we are to return in a complex value do so. */
12544 if (!arg_sinp && !arg_cosp)
12545 return build_complex (build_complex_type (type),
12546 result_c, result_s);
12548 /* Dereference the sin/cos pointer arguments. */
12549 arg_sinp = build_fold_indirect_ref (arg_sinp);
12550 arg_cosp = build_fold_indirect_ref (arg_cosp);
12551 /* Proceed iff valid pointer types were passed in. */
12552 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12553 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12555 /* Set the values. */
12556 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12557 result_s);
12558 TREE_SIDE_EFFECTS (result_s) = 1;
12559 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12560 result_c);
12561 TREE_SIDE_EFFECTS (result_c) = 1;
12562 /* Combine the assignments into a compound expr. */
12563 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12564 result_s, result_c));
12569 return result;
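/* Illustrative sketch (hypothetical example, not part of the original source):
   the helper above serves the GNU sincos extension and, via the complex
   return path, builtins such as cexpi; with a literal argument, both outputs
   below become compile-time constants.  */
#if 0
#define _GNU_SOURCE
#include <math.h>

void
example_sincos_fold (double *sp, double *cp)
{
  sincos (1.0, sp, cp);   /* *sp and *cp are assigned folded constants.  */
}
#endif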
12572 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12573 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12574 two-argument mpfr order N Bessel function FUNC on them and return
12575 the resulting value as a tree with type TYPE. The mpfr precision
12576 is set to the precision of TYPE. We assume that function FUNC
12577 returns zero if the result could be calculated exactly within the
12578 requested precision. */
12579 static tree
12580 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12581 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12582 const REAL_VALUE_TYPE *min, bool inclusive)
12584 tree result = NULL_TREE;
12586 STRIP_NOPS (arg1);
12587 STRIP_NOPS (arg2);
12589 /* To proceed, MPFR must exactly represent the target floating point
12590 format, which only happens when the target base equals two. */
12591 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12592 && host_integerp (arg1, 0)
12593 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12595 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12596 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12598 if (n == (long)n
12599 && real_isfinite (ra)
12600 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12602 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12603 int inexact;
12604 mpfr_t m;
12606 mpfr_init2 (m, prec);
12607 mpfr_from_real (m, ra, GMP_RNDN);
12608 mpfr_clear_flags ();
12609 inexact = func (m, n, m, GMP_RNDN);
12610 result = do_mpfr_ckconv (m, type, inexact);
12611 mpfr_clear (m);
12615 return result;
12618 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12619 the pointer *(ARG_QUO) and return the result. The type is taken
12620 from the type of ARG0 and is used for setting the precision of the
12621 calculation and results. */
12623 static tree
12624 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12626 tree const type = TREE_TYPE (arg0);
12627 tree result = NULL_TREE;
12629 STRIP_NOPS (arg0);
12630 STRIP_NOPS (arg1);
12632 /* To proceed, MPFR must exactly represent the target floating point
12633 format, which only happens when the target base equals two. */
12634 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12635 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12636 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12638 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12639 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12641 if (real_isfinite (ra0) && real_isfinite (ra1))
12643 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12644 tree result_rem;
12645 long integer_quo;
12646 mpfr_t m0, m1;
12648 mpfr_inits2 (prec, m0, m1, NULL);
12649 mpfr_from_real (m0, ra0, GMP_RNDN);
12650 mpfr_from_real (m1, ra1, GMP_RNDN);
12651 mpfr_clear_flags ();
12652 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
12653 /* Remquo is independent of the rounding mode, so pass
12654 inexact=0 to do_mpfr_ckconv(). */
12655 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12656 mpfr_clears (m0, m1, NULL);
12657 if (result_rem)
12659 /* MPFR calculates quo in the host's long so it may
12660 return more bits in quo than the target int can hold
12661 if sizeof(host long) > sizeof(target int). This can
12662 happen even for native compilers in LP64 mode. In
12663 these cases, modulo the quo value with the largest
12664 number that the target int can hold while leaving one
12665 bit for the sign. */
12666 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12667 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12669 /* Dereference the quo pointer argument. */
12670 arg_quo = build_fold_indirect_ref (arg_quo);
12671 /* Proceed iff a valid pointer type was passed in. */
12672 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12674 /* Set the value. */
12675 tree result_quo = fold_build2 (MODIFY_EXPR,
12676 TREE_TYPE (arg_quo), arg_quo,
12677 build_int_cst (NULL, integer_quo));
12678 TREE_SIDE_EFFECTS (result_quo) = 1;
12679 /* Combine the quo assignment with the rem. */
12680 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12681 result_quo, result_rem));
12686 return result;
12689 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12690 resulting value as a tree with type TYPE. The mpfr precision is
12691 set to the precision of TYPE. We assume that this mpfr function
12692 returns zero if the result could be calculated exactly within the
12693 requested precision. In addition, the integer pointer represented
12694 by ARG_SG will be dereferenced and set to the appropriate signgam
12695 (-1,1) value. */
12697 static tree
12698 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12700 tree result = NULL_TREE;
12702 STRIP_NOPS (arg);
12704 /* To proceed, MPFR must exactly represent the target floating point
12705 format, which only happens when the target base equals two. Also
12706 verify ARG is a constant and that ARG_SG is an int pointer. */
12707 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12708 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12709 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12710 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12712 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12714 /* In addition to NaN and Inf, the argument cannot be zero or a
12715 negative integer. */
12716 if (real_isfinite (ra)
12717 && ra->cl != rvc_zero
12718 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
12720 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12721 int inexact, sg;
12722 mpfr_t m;
12723 tree result_lg;
12725 mpfr_init2 (m, prec);
12726 mpfr_from_real (m, ra, GMP_RNDN);
12727 mpfr_clear_flags ();
12728 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
12729 result_lg = do_mpfr_ckconv (m, type, inexact);
12730 mpfr_clear (m);
12731 if (result_lg)
12733 tree result_sg;
12735 /* Dereference the arg_sg pointer argument. */
12736 arg_sg = build_fold_indirect_ref (arg_sg);
12737 /* Assign the signgam value into *arg_sg. */
12738 result_sg = fold_build2 (MODIFY_EXPR,
12739 TREE_TYPE (arg_sg), arg_sg,
12740 build_int_cst (NULL, sg));
12741 TREE_SIDE_EFFECTS (result_sg) = 1;
12742 /* Combine the signgam assignment with the lgamma result. */
12743 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12744 result_sg, result_lg));
12749 return result;
12751 #endif