/* Expand builtin functions.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "tree-gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "diagnostic.h"

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];

static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
					enum machine_mode, int);
static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
					enum machine_mode, int);
static rtx expand_builtin_bcopy (tree, int);
static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, rtx);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static rtx expand_builtin_fputs (tree, rtx, bool);
static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
static tree stabilize_va_list (tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (tree);
static tree fold_builtin_inf (tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (tree, tree);
static tree fold_builtin_cbrt (tree, tree);
static tree fold_builtin_pow (tree, tree, tree, tree);
static tree fold_builtin_powi (tree, tree, tree, tree);
static tree fold_builtin_cos (tree, tree, tree);
static tree fold_builtin_cosh (tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (tree, tree);
static tree fold_builtin_floor (tree, tree);
static tree fold_builtin_ceil (tree, tree);
static tree fold_builtin_round (tree, tree);
static tree fold_builtin_int_roundingfn (tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (tree, tree, tree);
static tree fold_builtin_memchr (tree, tree, tree, tree);
static tree fold_builtin_memcmp (tree, tree, tree);
static tree fold_builtin_strcmp (tree, tree);
static tree fold_builtin_strncmp (tree, tree, tree);
static tree fold_builtin_signbit (tree, tree);
static tree fold_builtin_copysign (tree, tree, tree, tree);
static tree fold_builtin_isascii (tree);
static tree fold_builtin_toascii (tree);
static tree fold_builtin_isdigit (tree);
static tree fold_builtin_fabs (tree, tree);
static tree fold_builtin_abs (tree, tree);
static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_n (tree, tree *, int, bool);
static tree fold_builtin_0 (tree, bool);
static tree fold_builtin_1 (tree, tree, bool);
static tree fold_builtin_2 (tree, tree, tree, bool);
static tree fold_builtin_3 (tree, tree, tree, tree, bool);
static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (tree, tree, bool);

static tree fold_builtin_strpbrk (tree, tree, tree);
static tree fold_builtin_strstr (tree, tree, tree);
static tree fold_builtin_strrchr (tree, tree, tree);
static tree fold_builtin_strcat (tree, tree);
static tree fold_builtin_strncat (tree, tree, tree);
static tree fold_builtin_strspn (tree, tree);
static tree fold_builtin_strcspn (tree, tree);
static tree fold_builtin_sprintf (tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
				  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
#endif

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool called_as_built_in (tree node)
{
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* We rely on TER to compute accurate alignment information.  */
  if (!(optimize && flag_tree_ter))
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case POINTER_PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  inner = max_align;
	  if (handled_component_p (exp))
	    {
	      HOST_WIDE_INT bitsize, bitpos;
	      tree offset;
	      enum machine_mode mode;
	      int unsignedp, volatilep;

	      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					 &mode, &unsignedp, &volatilep, true);
	      if (bitpos)
		inner = MIN (inner, (unsigned) (bitpos & -bitpos));
	      if (offset && TREE_CODE (offset) == PLUS_EXPR
		  && host_integerp (TREE_OPERAND (offset, 1), 1))
		{
		  /* Any overflow in calculating offset_bits won't change
		     the alignment.  */
		  unsigned offset_bits
		    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		       * BITS_PER_UNIT);

		  if (offset_bits)
		    inner = MIN (inner, (offset_bits & -offset_bits));
		  offset = TREE_OPERAND (offset, 0);
		}
	      if (offset && TREE_CODE (offset) == MULT_EXPR
		  && host_integerp (TREE_OPERAND (offset, 1), 1))
		{
		  /* Any overflow in calculating offset_factor won't change
		     the alignment.  */
		  unsigned offset_factor
		    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		       * BITS_PER_UNIT);

		  if (offset_factor)
		    inner = MIN (inner, (offset_factor & -offset_factor));
		}
	      else if (offset)
		inner = MIN (inner, BITS_PER_UNIT);
	    }
	  if (DECL_P (exp))
	    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
	  else if (CONSTANT_CLASS_P (exp))
	    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
	  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
		   || TREE_CODE (exp) == INDIRECT_REF)
	    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
	  else
	    align = MIN (align, inner);
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning (0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}

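/* Illustrative sketch (not part of GCC): for the STRING_CST "foobar",
   c_strlen yields ssize_int (6) with no offset and ssize_int (3) with a
   constant offset of 3; with a non-constant offset it still folds to
   6 - offset because the string has no embedded zero bytes, whereas
   "foo\0bar" with a non-constant offset yields NULL_TREE.  An offset
   known to be out of bounds warns and also yields NULL_TREE.  */
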
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}

/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}

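/* Illustrative sketch (not part of GCC): c_readstr ("abcd", SImode) packs
   the bytes in target order, so a little-endian target yields the value
   0x64636261 while a big-endian target yields 0x61626364; a string
   shorter than the mode is padded with zero bytes because CH sticks at
   zero once the terminating NUL has been read.  */
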
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (!host_integerp (cst, 1)
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = tree_low_cst (cst, 1);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_ADDRESSABLE (exp) == 0
      && (TREE_CODE (exp) == PARM_DECL
	  || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

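/* Illustrative sketch (not part of GCC): this is the expander behind the
   documented builtins

     void *fp = __builtin_frame_address (0);   // current frame
     void *ra = __builtin_return_address (1);  // caller's return address

   A COUNT of zero with __builtin_return_address uses the soft frame
   pointer; any other combination forces the hard frame pointer and
   disables frame pointer elimination, as the comment above explains.  */
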
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Tell optimize_save_area_alloca that extra work is going to
     need to go on during alloca.  */
  cfun->calls_setjmp = 1;

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

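/* Illustrative sketch (not part of GCC): after the setup above, the
   buffer passed to __builtin_setjmp is laid out as

     word 0           frame value (targetm.builtin_setjmp_frame_value)
     word 1           address of RECEIVER_LABEL
     words 2 and up   stack save area in SA_MODE

   which is exactly the layout that expand_builtin_longjmp and
   expand_builtin_update_setjmp_buf below read back.  */
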
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   gen_rtx_SCRATCH (VOIDmode))));
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   hard_frame_pointer_rtx)));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
					      REG_NOTES (insn));
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

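/* Illustrative sketch (not part of GCC): the only supported calling
   pattern for these internal builtins is the one the assertion above
   enforces, e.g.

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       ...                          // direct return
     ...
     __builtin_longjmp (buf, 1);    // second argument must be 1

   as used by the SJLJ exception-handling runtime.  */
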
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_insn (gen_rtx_CLOBBER (VOIDmode,
				  gen_rtx_MEM (BLKmode,
					       gen_rtx_SCRATCH (VOIDmode))));

      emit_insn (gen_rtx_CLOBBER (VOIDmode,
				  gen_rtx_MEM (BLKmode,
					       hard_frame_pointer_rtx)));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
      emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
					      const0_rtx, REG_NOTES (insn));
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;

#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = build_int_cst (NULL_TREE, 3);

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
	     (op0,
	      insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
	  || (GET_MODE (op0) != Pmode))
	{
	  op0 = convert_memory_address (Pmode, op0);
	  op0 = force_reg (Pmode, op0);
	}
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

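/* Illustrative sketch (not part of GCC): typical calls handled by the
   expander above, assuming P is a valid pointer expression:

     __builtin_prefetch (p);        // rw defaults to 0, locality to 3
     __builtin_prefetch (p, 1, 0);  // prefetch for write, no reuse expected

   Non-constant or out-of-range rw/locality arguments are diagnosed and
   replaced by zero, as the checks above show.  */
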
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  while (TREE_CODE (inner) == ARRAY_REF
		 || TREE_CODE (inner) == NOP_EXPR
		 || TREE_CODE (inner) == CONVERT_EXPR
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  if (MEM_OFFSET (mem)
	      && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
	    offset = INTVAL (MEM_OFFSET (mem));

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (! DECL_BIT_FIELD (field));
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      if (length >= 0
		  && TYPE_SIZE_UNIT (TREE_TYPE (inner))
		  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
		{
		  HOST_WIDE_INT size
		    = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
		  /* If we can prove the memory starting at XEXP (mem, 0)
		     and ending at XEXP (mem, 0) + LENGTH will fit into
		     this field, we can keep that COMPONENT_REF in MEM_EXPR.  */
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
	    }
	}
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = reg_raw_mode[regno];

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    apply_args_reg_offset[regno] = size;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	    apply_args_reg_offset[regno] = 0;
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    mode = reg_raw_mode[regno];

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (virtual_incoming_args_rtx);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  */
    push_topmost_sequence ();
    emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (GET_CODE (argsize) == CONST_INT)
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}

/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_insn (gen_rtx_USE (VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}

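/* Illustrative sketch (not part of GCC): the three expanders above back
   the documented call-forwarding idiom

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (res);

   where TARGET_FN stands for whatever function is being forwarded to
   and the size argument (64 here) is a caller-supplied upper bound on
   the argument block.  */
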
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
    }
}

/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}

1659 /* This helper macro, meant to be used in mathfn_built_in below,
1660 determines which among a set of three builtin math functions is
1661 appropriate for a given type mode. The `F' and `L' cases are
1662 automatically generated from the `double' case. */
1663 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1664 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1665 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1666 fcodel = BUILT_IN_MATHFN##L ; break;
1667 /* Similar to above, but appends _R after any F/L suffix. */
1668 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1669 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1670 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1671 fcodel = BUILT_IN_MATHFN##L_R ; break;
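/* For illustration (not part of GCC): CASE_MATHFN (BUILT_IN_SQRT) in the
   switch below expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so one CASE_MATHFN line covers the double, float and long double
   variants of a math builtin at once.  */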
1673 /* Return the mathematical function equivalent to FN but operating directly
1674 on TYPE, if available. If we can't do the conversion, return zero. */
1675 tree
1676 mathfn_built_in (tree type, enum built_in_function fn)
1678 enum built_in_function fcode, fcodef, fcodel;
1680 switch (fn)
1682 CASE_MATHFN (BUILT_IN_ACOS)
1683 CASE_MATHFN (BUILT_IN_ACOSH)
1684 CASE_MATHFN (BUILT_IN_ASIN)
1685 CASE_MATHFN (BUILT_IN_ASINH)
1686 CASE_MATHFN (BUILT_IN_ATAN)
1687 CASE_MATHFN (BUILT_IN_ATAN2)
1688 CASE_MATHFN (BUILT_IN_ATANH)
1689 CASE_MATHFN (BUILT_IN_CBRT)
1690 CASE_MATHFN (BUILT_IN_CEIL)
1691 CASE_MATHFN (BUILT_IN_CEXPI)
1692 CASE_MATHFN (BUILT_IN_COPYSIGN)
1693 CASE_MATHFN (BUILT_IN_COS)
1694 CASE_MATHFN (BUILT_IN_COSH)
1695 CASE_MATHFN (BUILT_IN_DREM)
1696 CASE_MATHFN (BUILT_IN_ERF)
1697 CASE_MATHFN (BUILT_IN_ERFC)
1698 CASE_MATHFN (BUILT_IN_EXP)
1699 CASE_MATHFN (BUILT_IN_EXP10)
1700 CASE_MATHFN (BUILT_IN_EXP2)
1701 CASE_MATHFN (BUILT_IN_EXPM1)
1702 CASE_MATHFN (BUILT_IN_FABS)
1703 CASE_MATHFN (BUILT_IN_FDIM)
1704 CASE_MATHFN (BUILT_IN_FLOOR)
1705 CASE_MATHFN (BUILT_IN_FMA)
1706 CASE_MATHFN (BUILT_IN_FMAX)
1707 CASE_MATHFN (BUILT_IN_FMIN)
1708 CASE_MATHFN (BUILT_IN_FMOD)
1709 CASE_MATHFN (BUILT_IN_FREXP)
1710 CASE_MATHFN (BUILT_IN_GAMMA)
1711 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1712 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1713 CASE_MATHFN (BUILT_IN_HYPOT)
1714 CASE_MATHFN (BUILT_IN_ILOGB)
1715 CASE_MATHFN (BUILT_IN_INF)
1716 CASE_MATHFN (BUILT_IN_ISINF)
1717 CASE_MATHFN (BUILT_IN_J0)
1718 CASE_MATHFN (BUILT_IN_J1)
1719 CASE_MATHFN (BUILT_IN_JN)
1720 CASE_MATHFN (BUILT_IN_LCEIL)
1721 CASE_MATHFN (BUILT_IN_LDEXP)
1722 CASE_MATHFN (BUILT_IN_LFLOOR)
1723 CASE_MATHFN (BUILT_IN_LGAMMA)
1724 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1725 CASE_MATHFN (BUILT_IN_LLCEIL)
1726 CASE_MATHFN (BUILT_IN_LLFLOOR)
1727 CASE_MATHFN (BUILT_IN_LLRINT)
1728 CASE_MATHFN (BUILT_IN_LLROUND)
1729 CASE_MATHFN (BUILT_IN_LOG)
1730 CASE_MATHFN (BUILT_IN_LOG10)
1731 CASE_MATHFN (BUILT_IN_LOG1P)
1732 CASE_MATHFN (BUILT_IN_LOG2)
1733 CASE_MATHFN (BUILT_IN_LOGB)
1734 CASE_MATHFN (BUILT_IN_LRINT)
1735 CASE_MATHFN (BUILT_IN_LROUND)
1736 CASE_MATHFN (BUILT_IN_MODF)
1737 CASE_MATHFN (BUILT_IN_NAN)
1738 CASE_MATHFN (BUILT_IN_NANS)
1739 CASE_MATHFN (BUILT_IN_NEARBYINT)
1740 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1741 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1742 CASE_MATHFN (BUILT_IN_POW)
1743 CASE_MATHFN (BUILT_IN_POWI)
1744 CASE_MATHFN (BUILT_IN_POW10)
1745 CASE_MATHFN (BUILT_IN_REMAINDER)
1746 CASE_MATHFN (BUILT_IN_REMQUO)
1747 CASE_MATHFN (BUILT_IN_RINT)
1748 CASE_MATHFN (BUILT_IN_ROUND)
1749 CASE_MATHFN (BUILT_IN_SCALB)
1750 CASE_MATHFN (BUILT_IN_SCALBLN)
1751 CASE_MATHFN (BUILT_IN_SCALBN)
1752 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1753 CASE_MATHFN (BUILT_IN_SIN)
1754 CASE_MATHFN (BUILT_IN_SINCOS)
1755 CASE_MATHFN (BUILT_IN_SINH)
1756 CASE_MATHFN (BUILT_IN_SQRT)
1757 CASE_MATHFN (BUILT_IN_TAN)
1758 CASE_MATHFN (BUILT_IN_TANH)
1759 CASE_MATHFN (BUILT_IN_TGAMMA)
1760 CASE_MATHFN (BUILT_IN_TRUNC)
1761 CASE_MATHFN (BUILT_IN_Y0)
1762 CASE_MATHFN (BUILT_IN_Y1)
1763 CASE_MATHFN (BUILT_IN_YN)
1765 default:
1766 return NULL_TREE;
1769 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1770 return implicit_built_in_decls[fcode];
1771 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1772 return implicit_built_in_decls[fcodef];
1773 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1774 return implicit_built_in_decls[fcodel];
1775 else
1776 return NULL_TREE;
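/* Usage sketch (illustrative only): a caller that has, say, a float
   operand and wants the matching variant of sin would write

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   which yields the implicit decl for sinf, or NULL_TREE when the
   runtime is not assumed to provide it.  */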
1779 /* If errno must be maintained, expand the RTL to check if the result,
1780 TARGET, of a built-in function call, EXP, is NaN, and if so set
1781 errno to EDOM. */
1783 static void
1784 expand_errno_check (tree exp, rtx target)
1786 rtx lab = gen_label_rtx ();
1788 /* Test the result; if it is NaN, set errno=EDOM because
1789 the argument was not in the domain. */
1790 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1791 0, lab);
1793 #ifdef TARGET_EDOM
1794 /* If this built-in doesn't throw an exception, set errno directly. */
1795 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1797 #ifdef GEN_ERRNO_RTX
1798 rtx errno_rtx = GEN_ERRNO_RTX;
1799 #else
1800 rtx errno_rtx
1801 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1802 #endif
1803 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1804 emit_label (lab);
1805 return;
1807 #endif
1809 /* Make sure the library call isn't expanded as a tail call. */
1810 CALL_EXPR_TAILCALL (exp) = 0;
1812 /* We can't set errno=EDOM directly; let the library call do it.
1813 Pop the arguments right away in case the call gets deleted. */
1814 NO_DEFER_POP;
1815 expand_call (exp, target, 0);
1816 OK_DEFER_POP;
1817 emit_label (lab);
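/* Plain-C analogue of the check emitted above (an illustrative sketch,
   not GCC internals): NaN is the only value that compares unequal to
   itself, so comparing TARGET with itself skips the errno store
   exactly when the result is an ordinary number.

     double res = log (x);
     if (res != res)
       errno = EDOM;

   The RTL version jumps over the store when the EQ comparison holds,
   which is the same condition.  */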
1820 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1821 Return NULL_RTX if a normal call should be emitted rather than expanding
1822 the function in-line. EXP is the expression that is a call to the builtin
1823 function; if convenient, the result should be placed in TARGET.
1824 SUBTARGET may be used as the target for computing one of EXP's operands. */
1826 static rtx
1827 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1829 optab builtin_optab;
1830 rtx op0, insns, before_call;
1831 tree fndecl = get_callee_fndecl (exp);
1832 enum machine_mode mode;
1833 bool errno_set = false;
1834 tree arg;
1836 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1837 return NULL_RTX;
1839 arg = CALL_EXPR_ARG (exp, 0);
1841 switch (DECL_FUNCTION_CODE (fndecl))
1843 CASE_FLT_FN (BUILT_IN_SQRT):
1844 errno_set = ! tree_expr_nonnegative_p (arg);
1845 builtin_optab = sqrt_optab;
1846 break;
1847 CASE_FLT_FN (BUILT_IN_EXP):
1848 errno_set = true; builtin_optab = exp_optab; break;
1849 CASE_FLT_FN (BUILT_IN_EXP10):
1850 CASE_FLT_FN (BUILT_IN_POW10):
1851 errno_set = true; builtin_optab = exp10_optab; break;
1852 CASE_FLT_FN (BUILT_IN_EXP2):
1853 errno_set = true; builtin_optab = exp2_optab; break;
1854 CASE_FLT_FN (BUILT_IN_EXPM1):
1855 errno_set = true; builtin_optab = expm1_optab; break;
1856 CASE_FLT_FN (BUILT_IN_LOGB):
1857 errno_set = true; builtin_optab = logb_optab; break;
1858 CASE_FLT_FN (BUILT_IN_LOG):
1859 errno_set = true; builtin_optab = log_optab; break;
1860 CASE_FLT_FN (BUILT_IN_LOG10):
1861 errno_set = true; builtin_optab = log10_optab; break;
1862 CASE_FLT_FN (BUILT_IN_LOG2):
1863 errno_set = true; builtin_optab = log2_optab; break;
1864 CASE_FLT_FN (BUILT_IN_LOG1P):
1865 errno_set = true; builtin_optab = log1p_optab; break;
1866 CASE_FLT_FN (BUILT_IN_ASIN):
1867 builtin_optab = asin_optab; break;
1868 CASE_FLT_FN (BUILT_IN_ACOS):
1869 builtin_optab = acos_optab; break;
1870 CASE_FLT_FN (BUILT_IN_TAN):
1871 builtin_optab = tan_optab; break;
1872 CASE_FLT_FN (BUILT_IN_ATAN):
1873 builtin_optab = atan_optab; break;
1874 CASE_FLT_FN (BUILT_IN_FLOOR):
1875 builtin_optab = floor_optab; break;
1876 CASE_FLT_FN (BUILT_IN_CEIL):
1877 builtin_optab = ceil_optab; break;
1878 CASE_FLT_FN (BUILT_IN_TRUNC):
1879 builtin_optab = btrunc_optab; break;
1880 CASE_FLT_FN (BUILT_IN_ROUND):
1881 builtin_optab = round_optab; break;
1882 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1883 builtin_optab = nearbyint_optab;
1884 if (flag_trapping_math)
1885 break;
1886 /* Else fall through and expand as rint. */
1887 CASE_FLT_FN (BUILT_IN_RINT):
1888 builtin_optab = rint_optab; break;
1889 default:
1890 gcc_unreachable ();
1893 /* Make a suitable register to place result in. */
1894 mode = TYPE_MODE (TREE_TYPE (exp));
1896 if (! flag_errno_math || ! HONOR_NANS (mode))
1897 errno_set = false;
1899 /* Before working hard, check whether the instruction is available. */
1900 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1902 target = gen_reg_rtx (mode);
1904 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1905 need to expand the argument again. This way, we will not perform
1906 side-effects more than once. */
1907 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1909 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1911 start_sequence ();
1913 /* Compute into TARGET.
1914 Set TARGET to wherever the result comes back. */
1915 target = expand_unop (mode, builtin_optab, op0, target, 0);
1917 if (target != 0)
1919 if (errno_set)
1920 expand_errno_check (exp, target);
1922 /* Output the entire sequence. */
1923 insns = get_insns ();
1924 end_sequence ();
1925 emit_insn (insns);
1926 return target;
1929 /* If we were unable to expand via the builtin, stop the sequence
1930 (without outputting the insns) and call the library function
1931 with the stabilized argument list. */
1932 end_sequence ();
1935 before_call = get_last_insn ();
1937 target = expand_call (exp, target, target == const0_rtx);
1939 /* If this is a sqrt operation and we don't care about errno, try to
1940 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1941 This allows the semantics of the libcall to be visible to the RTL
1942 optimizers. */
1943 if (builtin_optab == sqrt_optab && !errno_set)
1945 /* Search backwards through the insns emitted by expand_call looking
1946 for the instruction with the REG_RETVAL note. */
1947 rtx last = get_last_insn ();
1948 while (last != before_call)
1950 if (find_reg_note (last, REG_RETVAL, NULL))
1952 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1953 /* Check that the REG_EQUAL note is an EXPR_LIST with
1954 two elements, i.e. symbol_ref(sqrt) and the operand. */
1955 if (note
1956 && GET_CODE (note) == EXPR_LIST
1957 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1958 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1959 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1961 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1962 /* Check that the operand is a register with the expected mode. */
1963 if (operand
1964 && REG_P (operand)
1965 && GET_MODE (operand) == mode)
1967 /* Replace the REG_EQUAL note with a SQRT rtx. */
1968 rtx equiv = gen_rtx_SQRT (mode, operand);
1969 set_unique_reg_note (last, REG_EQUAL, equiv);
1972 break;
1974 last = PREV_INSN (last);
1978 return target;
1981 /* Expand a call to the builtin binary math functions (pow and atan2).
1982 Return NULL_RTX if a normal call should be emitted rather than expanding the
1983 function in-line. EXP is the expression that is a call to the builtin
1984 function; if convenient, the result should be placed in TARGET.
1985 SUBTARGET may be used as the target for computing one of EXP's
1986 operands. */
1988 static rtx
1989 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1991 optab builtin_optab;
1992 rtx op0, op1, insns;
1993 int op1_type = REAL_TYPE;
1994 tree fndecl = get_callee_fndecl (exp);
1995 tree arg0, arg1;
1996 enum machine_mode mode;
1997 bool errno_set = true;
1999 switch (DECL_FUNCTION_CODE (fndecl))
2001 CASE_FLT_FN (BUILT_IN_SCALBN):
2002 CASE_FLT_FN (BUILT_IN_SCALBLN):
2003 CASE_FLT_FN (BUILT_IN_LDEXP):
2004 op1_type = INTEGER_TYPE;
2005 default:
2006 break;
2009 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2010 return NULL_RTX;
2012 arg0 = CALL_EXPR_ARG (exp, 0);
2013 arg1 = CALL_EXPR_ARG (exp, 1);
2015 switch (DECL_FUNCTION_CODE (fndecl))
2017 CASE_FLT_FN (BUILT_IN_POW):
2018 builtin_optab = pow_optab; break;
2019 CASE_FLT_FN (BUILT_IN_ATAN2):
2020 builtin_optab = atan2_optab; break;
2021 CASE_FLT_FN (BUILT_IN_SCALB):
2022 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2023 return 0;
2024 builtin_optab = scalb_optab; break;
2025 CASE_FLT_FN (BUILT_IN_SCALBN):
2026 CASE_FLT_FN (BUILT_IN_SCALBLN):
2027 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2028 return 0;
2029 /* Fall through... */
2030 CASE_FLT_FN (BUILT_IN_LDEXP):
2031 builtin_optab = ldexp_optab; break;
2032 CASE_FLT_FN (BUILT_IN_FMOD):
2033 builtin_optab = fmod_optab; break;
2034 CASE_FLT_FN (BUILT_IN_REMAINDER):
2035 CASE_FLT_FN (BUILT_IN_DREM):
2036 builtin_optab = remainder_optab; break;
2037 default:
2038 gcc_unreachable ();
2041 /* Make a suitable register to place result in. */
2042 mode = TYPE_MODE (TREE_TYPE (exp));
2044 /* Before working hard, check whether the instruction is available. */
2045 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2046 return NULL_RTX;
2048 target = gen_reg_rtx (mode);
2050 if (! flag_errno_math || ! HONOR_NANS (mode))
2051 errno_set = false;
2053 /* Always stabilize the argument list. */
2054 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2055 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2057 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2058 op1 = expand_normal (arg1);
2060 start_sequence ();
2062 /* Compute into TARGET.
2063 Set TARGET to wherever the result comes back. */
2064 target = expand_binop (mode, builtin_optab, op0, op1,
2065 target, 0, OPTAB_DIRECT);
2067 /* If we were unable to expand via the builtin, stop the sequence
2068 (without outputting the insns) and call the library function
2069 with the stabilized argument list. */
2070 if (target == 0)
2072 end_sequence ();
2073 return expand_call (exp, target, target == const0_rtx);
2076 if (errno_set)
2077 expand_errno_check (exp, target);
2079 /* Output the entire sequence. */
2080 insns = get_insns ();
2081 end_sequence ();
2082 emit_insn (insns);
2084 return target;
2087 /* Expand a call to the builtin sin and cos math functions.
2088 Return NULL_RTX if a normal call should be emitted rather than expanding the
2089 function in-line. EXP is the expression that is a call to the builtin
2090 function; if convenient, the result should be placed in TARGET.
2091 SUBTARGET may be used as the target for computing one of EXP's
2092 operands. */
2094 static rtx
2095 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2097 optab builtin_optab;
2098 rtx op0, insns;
2099 tree fndecl = get_callee_fndecl (exp);
2100 enum machine_mode mode;
2101 tree arg;
2103 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2104 return NULL_RTX;
2106 arg = CALL_EXPR_ARG (exp, 0);
2108 switch (DECL_FUNCTION_CODE (fndecl))
2110 CASE_FLT_FN (BUILT_IN_SIN):
2111 CASE_FLT_FN (BUILT_IN_COS):
2112 builtin_optab = sincos_optab; break;
2113 default:
2114 gcc_unreachable ();
2117 /* Make a suitable register to place result in. */
2118 mode = TYPE_MODE (TREE_TYPE (exp));
2120 /* Check if sincos insn is available, otherwise fall back
2121 to sin or cos insn. */
2122 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2123 switch (DECL_FUNCTION_CODE (fndecl))
2125 CASE_FLT_FN (BUILT_IN_SIN):
2126 builtin_optab = sin_optab; break;
2127 CASE_FLT_FN (BUILT_IN_COS):
2128 builtin_optab = cos_optab; break;
2129 default:
2130 gcc_unreachable ();
2133 /* Before working hard, check whether the instruction is available. */
2134 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2136 target = gen_reg_rtx (mode);
2138 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2139 need to expand the argument again. This way, we will not perform
2140 side-effects more than once. */
2141 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2143 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2145 start_sequence ();
2147 /* Compute into TARGET.
2148 Set TARGET to wherever the result comes back. */
2149 if (builtin_optab == sincos_optab)
2151 int result;
2153 switch (DECL_FUNCTION_CODE (fndecl))
2155 CASE_FLT_FN (BUILT_IN_SIN):
2156 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2157 break;
2158 CASE_FLT_FN (BUILT_IN_COS):
2159 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2160 break;
2161 default:
2162 gcc_unreachable ();
2164 gcc_assert (result);
2166 else
2168 target = expand_unop (mode, builtin_optab, op0, target, 0);
2171 if (target != 0)
2173 /* Output the entire sequence. */
2174 insns = get_insns ();
2175 end_sequence ();
2176 emit_insn (insns);
2177 return target;
2180 /* If we were unable to expand via the builtin, stop the sequence
2181 (without outputting the insns) and call the library function
2182 with the stabilized argument list. */
2183 end_sequence ();
2186 target = expand_call (exp, target, target == const0_rtx);
2188 return target;
2191 /* Expand a call to one of the builtin math functions that operate on
2192 a floating point argument and output an integer result (ilogb, isinf,
2193 isnan, etc.).
2194 Return 0 if a normal call should be emitted rather than expanding the
2195 function in-line. EXP is the expression that is a call to the builtin
2196 function; if convenient, the result should be placed in TARGET.
2197 SUBTARGET may be used as the target for computing one of EXP's operands. */
2199 static rtx
2200 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2202 optab builtin_optab = 0;
2203 enum insn_code icode = CODE_FOR_nothing;
2204 rtx op0;
2205 tree fndecl = get_callee_fndecl (exp);
2206 enum machine_mode mode;
2207 bool errno_set = false;
2208 tree arg;
2210 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2211 return NULL_RTX;
2213 arg = CALL_EXPR_ARG (exp, 0);
2215 switch (DECL_FUNCTION_CODE (fndecl))
2217 CASE_FLT_FN (BUILT_IN_ILOGB):
2218 errno_set = true; builtin_optab = ilogb_optab; break;
2219 CASE_FLT_FN (BUILT_IN_ISINF):
2220 builtin_optab = isinf_optab; break;
2221 case BUILT_IN_ISNORMAL:
2222 case BUILT_IN_ISFINITE:
2223 CASE_FLT_FN (BUILT_IN_FINITE):
2224 /* These builtins have no optabs (yet). */
2225 break;
2226 default:
2227 gcc_unreachable ();
2230 /* There's no easy way to detect the case where we need to set EDOM. */
2231 if (flag_errno_math && errno_set)
2232 return NULL_RTX;
2234 /* Optab mode depends on the mode of the input argument. */
2235 mode = TYPE_MODE (TREE_TYPE (arg));
2237 if (builtin_optab)
2238 icode = optab_handler (builtin_optab, mode)->insn_code;
2240 /* Before working hard, check whether the instruction is available. */
2241 if (icode != CODE_FOR_nothing)
2243 /* Make a suitable register to place result in. */
2244 if (!target
2245 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2246 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2248 gcc_assert (insn_data[icode].operand[0].predicate
2249 (target, GET_MODE (target)));
2251 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2252 need to expand the argument again. This way, we will not perform
2253 side-effects more than once. */
2254 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2256 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2258 if (mode != GET_MODE (op0))
2259 op0 = convert_to_mode (mode, op0, 0);
2261 /* Compute into TARGET.
2262 Set TARGET to wherever the result comes back. */
2263 emit_unop_insn (icode, target, op0, UNKNOWN);
2264 return target;
2267 /* If there is no optab, try generic code. */
2268 switch (DECL_FUNCTION_CODE (fndecl))
2270 tree result;
2272 CASE_FLT_FN (BUILT_IN_ISINF):
2274 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2275 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2276 tree const type = TREE_TYPE (arg);
2277 REAL_VALUE_TYPE r;
2278 char buf[128];
2280 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2281 real_from_string (&r, buf);
2282 result = build_call_expr (isgr_fn, 2,
2283 fold_build1 (ABS_EXPR, type, arg),
2284 build_real (type, r));
2285 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2287 CASE_FLT_FN (BUILT_IN_FINITE):
2288 case BUILT_IN_ISFINITE:
2290 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2291 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2292 tree const type = TREE_TYPE (arg);
2293 REAL_VALUE_TYPE r;
2294 char buf[128];
2296 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2297 real_from_string (&r, buf);
2298 result = build_call_expr (isle_fn, 2,
2299 fold_build1 (ABS_EXPR, type, arg),
2300 build_real (type, r));
2301 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2303 case BUILT_IN_ISNORMAL:
2305 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2306 islessequal(fabs(x),DBL_MAX). */
2307 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2308 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2309 tree const type = TREE_TYPE (arg);
2310 REAL_VALUE_TYPE rmax, rmin;
2311 char buf[128];
2313 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2314 real_from_string (&rmax, buf);
2315 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2316 real_from_string (&rmin, buf);
2317 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2318 result = build_call_expr (isle_fn, 2, arg,
2319 build_real (type, rmax));
2320 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2321 build_call_expr (isge_fn, 2, arg,
2322 build_real (type, rmin)));
2323 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2325 default:
2326 break;
2329 target = expand_call (exp, target, target == const0_rtx);
2331 return target;
2334 /* Expand a call to the builtin sincos math function.
2335 Return NULL_RTX if a normal call should be emitted rather than expanding the
2336 function in-line. EXP is the expression that is a call to the builtin
2337 function. */
2339 static rtx
2340 expand_builtin_sincos (tree exp)
2342 rtx op0, op1, op2, target1, target2;
2343 enum machine_mode mode;
2344 tree arg, sinp, cosp;
2345 int result;
2347 if (!validate_arglist (exp, REAL_TYPE,
2348 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2349 return NULL_RTX;
2351 arg = CALL_EXPR_ARG (exp, 0);
2352 sinp = CALL_EXPR_ARG (exp, 1);
2353 cosp = CALL_EXPR_ARG (exp, 2);
2355 /* Make a suitable register to place result in. */
2356 mode = TYPE_MODE (TREE_TYPE (arg));
2358 /* Check if sincos insn is available, otherwise emit the call. */
2359 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2360 return NULL_RTX;
2362 target1 = gen_reg_rtx (mode);
2363 target2 = gen_reg_rtx (mode);
2365 op0 = expand_normal (arg);
2366 op1 = expand_normal (build_fold_indirect_ref (sinp));
2367 op2 = expand_normal (build_fold_indirect_ref (cosp));
2369 /* Compute into target1 and target2.
2370 Set TARGET to wherever the result comes back. */
2371 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2372 gcc_assert (result);
2374 /* Move target1 and target2 to the memory locations indicated
2375 by op1 and op2. */
2376 emit_move_insn (op1, target1);
2377 emit_move_insn (op2, target2);
2379 return const0_rtx;
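/* Source-level picture (illustrative, not part of GCC): a call such as

     sincos (x, &s, &c);

   is expanded above into one sincos_optab instruction that produces
   both values in pseudo registers, followed by stores of those
   registers into *(&s) and *(&c).  */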
2382 /* Expand a call to the internal cexpi builtin to the sincos math function.
2383 EXP is the expression that is a call to the builtin function; if convenient,
2384 the result should be placed in TARGET. SUBTARGET may be used as the target
2385 for computing one of EXP's operands. */
2387 static rtx
2388 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2390 tree fndecl = get_callee_fndecl (exp);
2391 tree arg, type;
2392 enum machine_mode mode;
2393 rtx op0, op1, op2;
2395 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2396 return NULL_RTX;
2398 arg = CALL_EXPR_ARG (exp, 0);
2399 type = TREE_TYPE (arg);
2400 mode = TYPE_MODE (TREE_TYPE (arg));
2402 /* Try expanding via a sincos optab, fall back to emitting a libcall
2403 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2404 is only generated from sincos or cexp, or when we have either of them. */
2405 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2407 op1 = gen_reg_rtx (mode);
2408 op2 = gen_reg_rtx (mode);
2410 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2412 /* Compute into op1 and op2. */
2413 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2415 else if (TARGET_HAS_SINCOS)
2417 tree call, fn = NULL_TREE;
2418 tree top1, top2;
2419 rtx op1a, op2a;
2421 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2422 fn = built_in_decls[BUILT_IN_SINCOSF];
2423 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2424 fn = built_in_decls[BUILT_IN_SINCOS];
2425 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2426 fn = built_in_decls[BUILT_IN_SINCOSL];
2427 else
2428 gcc_unreachable ();
2430 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2431 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2432 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2433 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2434 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2435 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2437 /* Make sure not to fold the sincos call again. */
2438 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2439 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2440 call, 3, arg, top1, top2));
2442 else
2444 tree call, fn = NULL_TREE, narg;
2445 tree ctype = build_complex_type (type);
2447 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2448 fn = built_in_decls[BUILT_IN_CEXPF];
2449 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2450 fn = built_in_decls[BUILT_IN_CEXP];
2451 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2452 fn = built_in_decls[BUILT_IN_CEXPL];
2453 else
2454 gcc_unreachable ();
2456 /* If we don't have a decl for cexp create one. This is the
2457 friendliest fallback if the user calls __builtin_cexpi
2458 when the target lacks full C99 function support. */
2459 if (fn == NULL_TREE)
2461 tree fntype;
2462 const char *name = NULL;
2464 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2465 name = "cexpf";
2466 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2467 name = "cexp";
2468 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2469 name = "cexpl";
2471 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2472 fn = build_fn_decl (name, fntype);
2475 narg = fold_build2 (COMPLEX_EXPR, ctype,
2476 build_real (type, dconst0), arg);
2478 /* Make sure not to fold the cexp call again. */
2479 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2480 return expand_expr (build_call_nary (ctype, call, 1, narg),
2481 target, VOIDmode, EXPAND_NORMAL);
2484 /* Now build the proper return type. */
2485 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2486 make_tree (TREE_TYPE (arg), op2),
2487 make_tree (TREE_TYPE (arg), op1)),
2488 target, VOIDmode, EXPAND_NORMAL);
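/* Summary sketch (illustrative): __builtin_cexpi (x) computes
   exp (i*x) = cos (x) + i*sin (x).  Depending on what the target
   offers, the code above uses the sincos instruction pattern, a call
   to sincos(), or a call to cexp on 0.0 + i*x, and then assembles the
   complex result from the two parts.  */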
2491 /* Expand a call to one of the builtin rounding functions gcc defines
2492 as an extension (lfloor and lceil). As these are gcc extensions we
2493 do not need to worry about setting errno to EDOM.
2494 If expanding via optab fails, lower the expression to (int)(floor(x)).
2495 EXP is the expression that is a call to the builtin function;
2496 if convenient, the result should be placed in TARGET. SUBTARGET may
2497 be used as the target for computing one of EXP's operands. */
2499 static rtx
2500 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2502 convert_optab builtin_optab;
2503 rtx op0, insns, tmp;
2504 tree fndecl = get_callee_fndecl (exp);
2505 enum built_in_function fallback_fn;
2506 tree fallback_fndecl;
2507 enum machine_mode mode;
2508 tree arg;
2510 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2511 gcc_unreachable ();
2513 arg = CALL_EXPR_ARG (exp, 0);
2515 switch (DECL_FUNCTION_CODE (fndecl))
2517 CASE_FLT_FN (BUILT_IN_LCEIL):
2518 CASE_FLT_FN (BUILT_IN_LLCEIL):
2519 builtin_optab = lceil_optab;
2520 fallback_fn = BUILT_IN_CEIL;
2521 break;
2523 CASE_FLT_FN (BUILT_IN_LFLOOR):
2524 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2525 builtin_optab = lfloor_optab;
2526 fallback_fn = BUILT_IN_FLOOR;
2527 break;
2529 default:
2530 gcc_unreachable ();
2533 /* Make a suitable register to place result in. */
2534 mode = TYPE_MODE (TREE_TYPE (exp));
2536 target = gen_reg_rtx (mode);
2538 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2539 need to expand the argument again. This way, we will not perform
2540 side-effects more than once. */
2541 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2543 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2545 start_sequence ();
2547 /* Compute into TARGET. */
2548 if (expand_sfix_optab (target, op0, builtin_optab))
2550 /* Output the entire sequence. */
2551 insns = get_insns ();
2552 end_sequence ();
2553 emit_insn (insns);
2554 return target;
2557 /* If we were unable to expand via the builtin, stop the sequence
2558 (without outputting the insns). */
2559 end_sequence ();
2561 /* Fall back to floating point rounding optab. */
2562 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2564 /* For non-C99 targets we may end up without a fallback fndecl here
2565 if the user called __builtin_lfloor directly. In this case emit
2566 a call to the floor/ceil variants nevertheless. This should result
2567 in the best user experience for targets without full C99 support. */
2568 if (fallback_fndecl == NULL_TREE)
2570 tree fntype;
2571 const char *name = NULL;
2573 switch (DECL_FUNCTION_CODE (fndecl))
2575 case BUILT_IN_LCEIL:
2576 case BUILT_IN_LLCEIL:
2577 name = "ceil";
2578 break;
2579 case BUILT_IN_LCEILF:
2580 case BUILT_IN_LLCEILF:
2581 name = "ceilf";
2582 break;
2583 case BUILT_IN_LCEILL:
2584 case BUILT_IN_LLCEILL:
2585 name = "ceill";
2586 break;
2587 case BUILT_IN_LFLOOR:
2588 case BUILT_IN_LLFLOOR:
2589 name = "floor";
2590 break;
2591 case BUILT_IN_LFLOORF:
2592 case BUILT_IN_LLFLOORF:
2593 name = "floorf";
2594 break;
2595 case BUILT_IN_LFLOORL:
2596 case BUILT_IN_LLFLOORL:
2597 name = "floorl";
2598 break;
2599 default:
2600 gcc_unreachable ();
2603 fntype = build_function_type_list (TREE_TYPE (arg),
2604 TREE_TYPE (arg), NULL_TREE);
2605 fallback_fndecl = build_fn_decl (name, fntype);
2608 exp = build_call_expr (fallback_fndecl, 1, arg);
2610 tmp = expand_normal (exp);
2612 /* Truncate the result of the floating point optab to an integer
2613 via expand_fix (). */
2614 target = gen_reg_rtx (mode);
2615 expand_fix (target, tmp, 0);
2617 return target;
2620 /* Expand a call to one of the builtin math functions doing integer
2621 conversion (lrint and lround).
2622 Return 0 if a normal call should be emitted rather than expanding the
2623 function in-line. EXP is the expression that is a call to the builtin
2624 function; if convenient, the result should be placed in TARGET.
2625 SUBTARGET may be used as the target for computing one of EXP's operands. */
2627 static rtx
2628 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2630 convert_optab builtin_optab;
2631 rtx op0, insns;
2632 tree fndecl = get_callee_fndecl (exp);
2633 tree arg;
2634 enum machine_mode mode;
2636 /* There's no easy way to detect the case where we need to set EDOM. */
2637 if (flag_errno_math)
2638 return NULL_RTX;
2640 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2641 gcc_unreachable ();
2643 arg = CALL_EXPR_ARG (exp, 0);
2645 switch (DECL_FUNCTION_CODE (fndecl))
2647 CASE_FLT_FN (BUILT_IN_LRINT):
2648 CASE_FLT_FN (BUILT_IN_LLRINT):
2649 builtin_optab = lrint_optab; break;
2650 CASE_FLT_FN (BUILT_IN_LROUND):
2651 CASE_FLT_FN (BUILT_IN_LLROUND):
2652 builtin_optab = lround_optab; break;
2653 default:
2654 gcc_unreachable ();
2657 /* Make a suitable register to place result in. */
2658 mode = TYPE_MODE (TREE_TYPE (exp));
2660 target = gen_reg_rtx (mode);
2662 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2663 need to expand the argument again. This way, we will not perform
2664 side-effects more than once. */
2665 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2667 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2669 start_sequence ();
2671 if (expand_sfix_optab (target, op0, builtin_optab))
2673 /* Output the entire sequence. */
2674 insns = get_insns ();
2675 end_sequence ();
2676 emit_insn (insns);
2677 return target;
2680 /* If we were unable to expand via the builtin, stop the sequence
2681 (without outputting the insns) and call the library function
2682 with the stabilized argument list. */
2683 end_sequence ();
2685 target = expand_call (exp, target, target == const0_rtx);
2687 return target;
2690 /* To evaluate powi(x,n), the floating point value x raised to the
2691 constant integer exponent n, we use a hybrid algorithm that
2692 combines the "window method" with look-up tables. For an
2693 introduction to exponentiation algorithms and "addition chains",
2694 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2695 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2696 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2697 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2699 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2700 multiplications to inline before calling the system library's pow
2701 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2702 so this default never requires calling pow, powf or powl. */
2704 #ifndef POWI_MAX_MULTS
2705 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2706 #endif
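/* Where the "2*bits(n)-2" figure above comes from (an illustrative
   derivation, assuming plain binary exponentiation as the worst case):
   computing x**n by repeated squaring needs one squaring per bit of n
   after the first, plus at most one extra multiplication per bit after
   the first, i.e.

     (bits(n) - 1) + (bits(n) - 1) = 2*bits(n) - 2

   multiplications, so a limit of 2*HOST_BITS_PER_WIDE_INT-2 can never
   be exceeded for any representable exponent.  */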
2708 /* The size of the "optimal power tree" lookup table. All
2709 exponents less than this value are simply looked up in the
2710 powi_table below. This threshold is also used to size the
2711 cache of pseudo registers that hold intermediate results. */
2712 #define POWI_TABLE_SIZE 256
2714 /* The size, in bits, of the window used in the "window method"
2715 exponentiation algorithm. This is equivalent to a radix of
2716 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2717 #define POWI_WINDOW_SIZE 3
2719 /* The following table is an efficient representation of an
2720 "optimal power tree". For each value, i, the corresponding
2721 value, j, in the table states that an optimal evaluation
2722 sequence for calculating pow(x,i) can be found by evaluating
2723 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2724 100 integers is given in Knuth's "Seminumerical Algorithms". */
2726 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2728 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2729 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2730 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2731 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2732 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2733 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2734 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2735 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2736 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2737 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2738 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2739 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2740 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2741 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2742 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2743 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2744 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2745 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2746 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2747 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2748 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2749 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2750 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2751 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2752 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2753 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2754 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2755 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2756 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2757 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2758 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2759 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
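/* Worked example (illustrative): powi_table[7] is 4, so x**7 is
   evaluated as x**4 * x**3.  Recursing with the same table
   (powi_table[4] == 2, powi_table[3] == 2, powi_table[2] == 1) gives
   the chain

     t2 = x  * x;
     t3 = x  * t2;
     t4 = t2 * t2;
     t7 = t3 * t4;

   i.e. four multiplications, which is what expand_powi_1 below emits,
   caching each intermediate result in a pseudo register.  */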
2763 /* Return the number of multiplications required to calculate
2764 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2765 subroutine of powi_cost. CACHE is an array indicating
2766 which exponents have already been calculated. */
2768 static int
2769 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2771 /* If we've already calculated this exponent, then this evaluation
2772 doesn't require any additional multiplications. */
2773 if (cache[n])
2774 return 0;
2776 cache[n] = true;
2777 return powi_lookup_cost (n - powi_table[n], cache)
2778 + powi_lookup_cost (powi_table[n], cache) + 1;
2781 /* Return the number of multiplications required to calculate
2782 powi(x,n) for an arbitrary x, given the exponent N. This
2783 function needs to be kept in sync with expand_powi below. */
2785 static int
2786 powi_cost (HOST_WIDE_INT n)
2788 bool cache[POWI_TABLE_SIZE];
2789 unsigned HOST_WIDE_INT digit;
2790 unsigned HOST_WIDE_INT val;
2791 int result;
2793 if (n == 0)
2794 return 0;
2796 /* Ignore the reciprocal when calculating the cost. */
2797 val = (n < 0) ? -n : n;
2799 /* Initialize the exponent cache. */
2800 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2801 cache[1] = true;
2803 result = 0;
2805 while (val >= POWI_TABLE_SIZE)
2807 if (val & 1)
2809 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2810 result += powi_lookup_cost (digit, cache)
2811 + POWI_WINDOW_SIZE + 1;
2812 val >>= POWI_WINDOW_SIZE;
2814 else
2816 val >>= 1;
2817 result++;
2821 return result + powi_lookup_cost (val, cache);
2824 /* Recursive subroutine of expand_powi. This function takes the array,
2825 CACHE, of already calculated exponents and an exponent N and returns
2826 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2828 static rtx
2829 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2831 unsigned HOST_WIDE_INT digit;
2832 rtx target, result;
2833 rtx op0, op1;
2835 if (n < POWI_TABLE_SIZE)
2837 if (cache[n])
2838 return cache[n];
2840 target = gen_reg_rtx (mode);
2841 cache[n] = target;
2843 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2844 op1 = expand_powi_1 (mode, powi_table[n], cache);
2846 else if (n & 1)
2848 target = gen_reg_rtx (mode);
2849 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2850 op0 = expand_powi_1 (mode, n - digit, cache);
2851 op1 = expand_powi_1 (mode, digit, cache);
2853 else
2855 target = gen_reg_rtx (mode);
2856 op0 = expand_powi_1 (mode, n >> 1, cache);
2857 op1 = op0;
2860 result = expand_mult (mode, op0, op1, target, 0);
2861 if (result != target)
2862 emit_move_insn (target, result);
2863 return target;
2866 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2867 floating point operand in mode MODE, and N is the exponent. This
2868 function needs to be kept in sync with powi_cost above. */
2870 static rtx
2871 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2873 unsigned HOST_WIDE_INT val;
2874 rtx cache[POWI_TABLE_SIZE];
2875 rtx result;
2877 if (n == 0)
2878 return CONST1_RTX (mode);
2880 val = (n < 0) ? -n : n;
2882 memset (cache, 0, sizeof (cache));
2883 cache[1] = x;
2885 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2887 /* If the original exponent was negative, reciprocate the result. */
2888 if (n < 0)
2889 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2890 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2892 return result;
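/* E.g. (illustrative): powi (x, -3) is expanded as t2 = x*x,
   t3 = x*t2 and finally 1.0/t3; only the last step depends on the
   sign of N.  */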
2895 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2896 a normal call should be emitted rather than expanding the function
2897 in-line. EXP is the expression that is a call to the builtin
2898 function; if convenient, the result should be placed in TARGET. */
2900 static rtx
2901 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2903 tree arg0, arg1;
2904 tree fn, narg0;
2905 tree type = TREE_TYPE (exp);
2906 REAL_VALUE_TYPE cint, c, c2;
2907 HOST_WIDE_INT n;
2908 rtx op, op2;
2909 enum machine_mode mode = TYPE_MODE (type);
2911 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2912 return NULL_RTX;
2914 arg0 = CALL_EXPR_ARG (exp, 0);
2915 arg1 = CALL_EXPR_ARG (exp, 1);
2917 if (TREE_CODE (arg1) != REAL_CST
2918 || TREE_OVERFLOW (arg1))
2919 return expand_builtin_mathfn_2 (exp, target, subtarget);
2921 /* Handle constant exponents. */
2923 /* For integer valued exponents we can expand to an optimal multiplication
2924 sequence using expand_powi. */
2925 c = TREE_REAL_CST (arg1);
2926 n = real_to_integer (&c);
2927 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2928 if (real_identical (&c, &cint)
2929 && ((n >= -1 && n <= 2)
2930 || (flag_unsafe_math_optimizations
2931 && !optimize_size
2932 && powi_cost (n) <= POWI_MAX_MULTS)))
2934 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2935 if (n != 1)
2937 op = force_reg (mode, op);
2938 op = expand_powi (op, mode, n);
2940 return op;
2943 narg0 = builtin_save_expr (arg0);
2945 /* If the exponent is not integer valued, check if it is half of an integer.
2946 In this case we can expand to sqrt (x) * x**(n/2). */
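/* For example (illustrative): pow (x, 2.5) gives c2 == 5.0 and n == 5,
   so the expansion below emits sqrt (x) multiplied by x**2, when the
   checks that follow allow it; a negative half-integer exponent such
   as -2.5 is handled by the final reciprocal.  */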
2947 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2948 if (fn != NULL_TREE)
2950 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2951 n = real_to_integer (&c2);
2952 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2953 if (real_identical (&c2, &cint)
2954 && ((flag_unsafe_math_optimizations
2955 && !optimize_size
2956 && powi_cost (n/2) <= POWI_MAX_MULTS)
2957 || n == 1))
2959 tree call_expr = build_call_expr (fn, 1, narg0);
2960 /* Use expand_expr in case the newly built call expression
2961 was folded to a non-call. */
2962 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2963 if (n != 1)
2965 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2966 op2 = force_reg (mode, op2);
2967 op2 = expand_powi (op2, mode, abs (n / 2));
2968 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2969 0, OPTAB_LIB_WIDEN);
2970 /* If the original exponent was negative, reciprocate the
2971 result. */
2972 if (n < 0)
2973 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2974 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2976 return op;
2980 /* Check whether the exponent is a third of an integer. In this case
2981 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2982 different from pow (x, 1./3.) due to rounding and behavior
2983 with negative x, we need to constrain this transformation to
2984 unsafe math and positive x or finite math. */
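/* For example (illustrative): pow (x, 5./3.) gives n == 5; since
   5 % 3 == 2 the code below squares cbrt (x), and since 5/3 == 1 it
   multiplies by x once, yielding x * cbrt(x)**2 == x**(5/3), subject
   to the checks just below.  */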
2985 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2986 if (fn != NULL_TREE
2987 && flag_unsafe_math_optimizations
2988 && (tree_expr_nonnegative_p (arg0)
2989 || !HONOR_NANS (mode)))
2991 REAL_VALUE_TYPE dconst3;
2992 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
2993 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2994 real_round (&c2, mode, &c2);
2995 n = real_to_integer (&c2);
2996 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2997 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2998 real_convert (&c2, mode, &c2);
2999 if (real_identical (&c2, &c)
3000 && ((!optimize_size
3001 && powi_cost (n/3) <= POWI_MAX_MULTS)
3002 || n == 1))
3004 tree call_expr = build_call_expr (fn, 1, narg0);
3005 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3006 if (abs (n) % 3 == 2)
3007 op = expand_simple_binop (mode, MULT, op, op, op,
3008 0, OPTAB_LIB_WIDEN);
3009 if (n != 1)
3011 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3012 op2 = force_reg (mode, op2);
3013 op2 = expand_powi (op2, mode, abs (n / 3));
3014 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3015 0, OPTAB_LIB_WIDEN);
3016 /* If the original exponent was negative, reciprocate the
3017 result. */
3018 if (n < 0)
3019 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3020 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3022 return op;
3026 /* Fall back to optab expansion. */
3027 return expand_builtin_mathfn_2 (exp, target, subtarget);
3030 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3031 a normal call should be emitted rather than expanding the function
3032 in-line. EXP is the expression that is a call to the builtin
3033 function; if convenient, the result should be placed in TARGET. */
3035 static rtx
3036 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3038 tree arg0, arg1;
3039 rtx op0, op1;
3040 enum machine_mode mode;
3041 enum machine_mode mode2;
3043 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3044 return NULL_RTX;
3046 arg0 = CALL_EXPR_ARG (exp, 0);
3047 arg1 = CALL_EXPR_ARG (exp, 1);
3048 mode = TYPE_MODE (TREE_TYPE (exp));
3050 /* Handle constant power. */
3052 if (TREE_CODE (arg1) == INTEGER_CST
3053 && !TREE_OVERFLOW (arg1))
3055 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3057 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3058 Otherwise, check the number of multiplications required. */
3059 if ((TREE_INT_CST_HIGH (arg1) == 0
3060 || TREE_INT_CST_HIGH (arg1) == -1)
3061 && ((n >= -1 && n <= 2)
3062 || (! optimize_size
3063 && powi_cost (n) <= POWI_MAX_MULTS)))
3065 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3066 op0 = force_reg (mode, op0);
3067 return expand_powi (op0, mode, n);
3071 /* Emit a libcall to libgcc. */
3073 /* Mode of the 2nd argument must match that of an int. */
3074 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3076 if (target == NULL_RTX)
3077 target = gen_reg_rtx (mode);
3079 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3080 if (GET_MODE (op0) != mode)
3081 op0 = convert_to_mode (mode, op0, 0);
3082 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3083 if (GET_MODE (op1) != mode2)
3084 op1 = convert_to_mode (mode2, op1, 0);
3086 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3087 target, LCT_CONST_MAKE_BLOCK, mode, 2,
3088 op0, mode, op1, mode2);
3090 return target;
3093 /* Expand expression EXP, which is a call to the strlen builtin. Return
3094 NULL_RTX if we failed (the caller should then emit a normal call); otherwise
3095 try to get the result in TARGET, if convenient. */
3097 static rtx
3098 expand_builtin_strlen (tree exp, rtx target,
3099 enum machine_mode target_mode)
3101 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3102 return NULL_RTX;
3103 else
3105 rtx pat;
3106 tree len;
3107 tree src = CALL_EXPR_ARG (exp, 0);
3108 rtx result, src_reg, char_rtx, before_strlen;
3109 enum machine_mode insn_mode = target_mode, char_mode;
3110 enum insn_code icode = CODE_FOR_nothing;
3111 int align;
3113 /* If the length can be computed at compile-time, return it. */
3114 len = c_strlen (src, 0);
3115 if (len)
3116 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3118 /* If the length can be computed at compile-time and is a constant
3119 integer, but there are side-effects in src, evaluate
3120 src for side-effects, then return len.
3121 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3122 can be optimized into: i++; x = 3; */
3123 len = c_strlen (src, 1);
3124 if (len && TREE_CODE (len) == INTEGER_CST)
3126 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3127 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3130 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3132 /* If SRC is not a pointer type, don't do this operation inline. */
3133 if (align == 0)
3134 return NULL_RTX;
3136 /* Bail out if we can't compute strlen in the right mode. */
3137 while (insn_mode != VOIDmode)
3139 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3140 if (icode != CODE_FOR_nothing)
3141 break;
3143 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3145 if (insn_mode == VOIDmode)
3146 return NULL_RTX;
3148 /* Make a place to write the result of the instruction. */
3149 result = target;
3150 if (! (result != 0
3151 && REG_P (result)
3152 && GET_MODE (result) == insn_mode
3153 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3154 result = gen_reg_rtx (insn_mode);
3156 /* Make a place to hold the source address. We will not expand
3157 the actual source until we are sure that the expansion will
3158 not fail -- there are trees that cannot be expanded twice. */
3159 src_reg = gen_reg_rtx (Pmode);
3161 /* Mark the beginning of the strlen sequence so we can emit the
3162 source operand later. */
3163 before_strlen = get_last_insn ();
3165 char_rtx = const0_rtx;
3166 char_mode = insn_data[(int) icode].operand[2].mode;
3167 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3168 char_mode))
3169 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3171 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3172 char_rtx, GEN_INT (align));
3173 if (! pat)
3174 return NULL_RTX;
3175 emit_insn (pat);
3177 /* Now that we are assured of success, expand the source. */
3178 start_sequence ();
3179 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3180 if (pat != src_reg)
3181 emit_move_insn (src_reg, pat);
3182 pat = get_insns ();
3183 end_sequence ();
3185 if (before_strlen)
3186 emit_insn_after (pat, before_strlen);
3187 else
3188 emit_insn_before (pat, get_insns ());
3190 /* Return the value in the proper mode for this function. */
3191 if (GET_MODE (result) == target_mode)
3192 target = result;
3193 else if (target != 0)
3194 convert_move (target, result, 0);
3195 else
3196 target = convert_to_mode (target_mode, result, 0);
3198 return target;
3202 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed (the
3203 caller should then emit a normal call); otherwise try to get the result
3204 in TARGET, if convenient (and in mode MODE if that's convenient). */
3206 static rtx
3207 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3209 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3211 tree type = TREE_TYPE (exp);
3212 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3213 CALL_EXPR_ARG (exp, 1), type);
3214 if (result)
3215 return expand_expr (result, target, mode, EXPAND_NORMAL);
3217 return NULL_RTX;
3220 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed (the
3221 caller should then emit a normal call); otherwise try to get the result
3222 in TARGET, if convenient (and in mode MODE if that's convenient). */
3224 static rtx
3225 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3227 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3229 tree type = TREE_TYPE (exp);
3230 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3231 CALL_EXPR_ARG (exp, 1), type);
3232 if (result)
3233 return expand_expr (result, target, mode, EXPAND_NORMAL);
3235 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3237 return NULL_RTX;
3240 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed (the
3241 caller should then emit a normal call); otherwise try to get the result
3242 in TARGET, if convenient (and in mode MODE if that's convenient). */
3244 static rtx
3245 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3247 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3249 tree type = TREE_TYPE (exp);
3250 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3251 CALL_EXPR_ARG (exp, 1), type);
3252 if (result)
3253 return expand_expr (result, target, mode, EXPAND_NORMAL);
3255 return NULL_RTX;
3258 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed (the
3259 caller should then emit a normal call); otherwise try to get the result
3260 in TARGET, if convenient (and in mode MODE if that's convenient). */
3262 static rtx
3263 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3265 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3267 tree type = TREE_TYPE (exp);
3268 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3269 CALL_EXPR_ARG (exp, 1), type);
3270 if (result)
3271 return expand_expr (result, target, mode, EXPAND_NORMAL);
3273 return NULL_RTX;
3276 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3277 bytes from constant string DATA + OFFSET and return it as target
3278 constant. */
3280 static rtx
3281 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3282 enum machine_mode mode)
3284 const char *str = (const char *) data;
3286 gcc_assert (offset >= 0
3287 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3288 <= strlen (str) + 1));
3290 return c_readstr (str + offset, mode);
3293 /* Expand a call EXP to the memcpy builtin.
3294 Return NULL_RTX if we failed (the caller should then emit a normal call);
3295 otherwise try to get the result in TARGET, if convenient (and in
3296 mode MODE if that's convenient). */
3298 static rtx
3299 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3301 tree fndecl = get_callee_fndecl (exp);
3303 if (!validate_arglist (exp,
3304 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3305 return NULL_RTX;
3306 else
3308 tree dest = CALL_EXPR_ARG (exp, 0);
3309 tree src = CALL_EXPR_ARG (exp, 1);
3310 tree len = CALL_EXPR_ARG (exp, 2);
3311 const char *src_str;
3312 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3313 unsigned int dest_align
3314 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3315 rtx dest_mem, src_mem, dest_addr, len_rtx;
3316 tree result = fold_builtin_memory_op (dest, src, len,
3317 TREE_TYPE (TREE_TYPE (fndecl)),
3318 false, /*endp=*/0);
3319 HOST_WIDE_INT expected_size = -1;
3320 unsigned int expected_align = 0;
3322 if (result)
3324 while (TREE_CODE (result) == COMPOUND_EXPR)
3326 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3327 EXPAND_NORMAL);
3328 result = TREE_OPERAND (result, 1);
3330 return expand_expr (result, target, mode, EXPAND_NORMAL);
3333 /* If DEST is not a pointer type, call the normal function. */
3334 if (dest_align == 0)
3335 return NULL_RTX;
3337 /* If SRC is not a pointer type, don't do this
3338 operation in-line. */
3339 if (src_align == 0)
3340 return NULL_RTX;
3342 stringop_block_profile (exp, &expected_align, &expected_size);
3343 if (expected_align < dest_align)
3344 expected_align = dest_align;
3345 dest_mem = get_memory_rtx (dest, len);
3346 set_mem_align (dest_mem, dest_align);
3347 len_rtx = expand_normal (len);
3348 src_str = c_getstr (src);
3350 /* If SRC is a string constant and block move would be done
3351 by pieces, we can avoid loading the string from memory
3352 and only store the computed constants. */
3353 if (src_str
3354 && GET_CODE (len_rtx) == CONST_INT
3355 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3356 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3357 (void *) src_str, dest_align, false))
3359 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3360 builtin_memcpy_read_str,
3361 (void *) src_str, dest_align, false, 0);
3362 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3363 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3364 return dest_mem;
3367 src_mem = get_memory_rtx (src, len);
3368 set_mem_align (src_mem, src_align);
3370 /* Copy word part most expediently. */
3371 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3372 CALL_EXPR_TAILCALL (exp)
3373 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3374 expected_align, expected_size);
3376 if (dest_addr == 0)
3378 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3379 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3381 return dest_addr;
3385 /* Expand a call EXP to the mempcpy builtin.
3386 Return NULL_RTX if we failed (the caller should then emit a normal call);
3387 otherwise try to get the result in TARGET, if convenient (and in
3388 mode MODE if that's convenient). If ENDP is 0 return the
3389 destination pointer, if ENDP is 1 return the end pointer ala
3390 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3391 stpcpy. */
3393 static rtx
3394 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3396 if (!validate_arglist (exp,
3397 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3398 return NULL_RTX;
3399 else
3401 tree dest = CALL_EXPR_ARG (exp, 0);
3402 tree src = CALL_EXPR_ARG (exp, 1);
3403 tree len = CALL_EXPR_ARG (exp, 2);
3404 return expand_builtin_mempcpy_args (dest, src, len,
3405 TREE_TYPE (exp),
3406 target, mode, /*endp=*/ 1);
3410 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3411 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3412 so that this can also be called without constructing an actual CALL_EXPR.
3413 TYPE is the return type of the call. The other arguments and return value
3414 are the same as for expand_builtin_mempcpy. */
3416 static rtx
3417 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3418 rtx target, enum machine_mode mode, int endp)
3420 /* If return value is ignored, transform mempcpy into memcpy. */
3421 if (target == const0_rtx)
3423 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3425 if (!fn)
3426 return NULL_RTX;
3428 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3429 target, mode, EXPAND_NORMAL);
3431 else
3433 const char *src_str;
3434 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3435 unsigned int dest_align
3436 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3437 rtx dest_mem, src_mem, len_rtx;
3438 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3440 if (result)
3442 while (TREE_CODE (result) == COMPOUND_EXPR)
3444 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3445 EXPAND_NORMAL);
3446 result = TREE_OPERAND (result, 1);
3448 return expand_expr (result, target, mode, EXPAND_NORMAL);
3451 /* If either SRC or DEST is not a pointer type, don't do this
3452 operation in-line. */
3453 if (dest_align == 0 || src_align == 0)
3454 return NULL_RTX;
3456 /* If LEN is not constant, call the normal function. */
3457 if (! host_integerp (len, 1))
3458 return NULL_RTX;
3460 len_rtx = expand_normal (len);
3461 src_str = c_getstr (src);
3463 /* If SRC is a string constant and block move would be done
3464 by pieces, we can avoid loading the string from memory
3465 and only store the computed constants. */
3466 if (src_str
3467 && GET_CODE (len_rtx) == CONST_INT
3468 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3469 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3470 (void *) src_str, dest_align, false))
3472 dest_mem = get_memory_rtx (dest, len);
3473 set_mem_align (dest_mem, dest_align);
3474 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3475 builtin_memcpy_read_str,
3476 (void *) src_str, dest_align,
3477 false, endp);
3478 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3479 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3480 return dest_mem;
3483 if (GET_CODE (len_rtx) == CONST_INT
3484 && can_move_by_pieces (INTVAL (len_rtx),
3485 MIN (dest_align, src_align)))
3487 dest_mem = get_memory_rtx (dest, len);
3488 set_mem_align (dest_mem, dest_align);
3489 src_mem = get_memory_rtx (src, len);
3490 set_mem_align (src_mem, src_align);
3491 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3492 MIN (dest_align, src_align), endp);
3493 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3494 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3495 return dest_mem;
3498 return NULL_RTX;
3502 /* Expand expression EXP, which is a call to the memmove builtin. Return
3503 NULL_RTX if we failed; the caller should emit a normal call. */
3505 static rtx
3506 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3508 if (!validate_arglist (exp,
3509 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3510 return NULL_RTX;
3511 else
3513 tree dest = CALL_EXPR_ARG (exp, 0);
3514 tree src = CALL_EXPR_ARG (exp, 1);
3515 tree len = CALL_EXPR_ARG (exp, 2);
3516 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3517 target, mode, ignore);
3521 /* Helper function to do the actual work for expand_builtin_memmove. The
3522 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3523 so that this can also be called without constructing an actual CALL_EXPR.
3524 TYPE is the return type of the call. The other arguments and return value
3525 are the same as for expand_builtin_memmove. */
3527 static rtx
3528 expand_builtin_memmove_args (tree dest, tree src, tree len,
3529 tree type, rtx target, enum machine_mode mode,
3530 int ignore)
3532 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3534 if (result)
3536 STRIP_TYPE_NOPS (result);
3537 while (TREE_CODE (result) == COMPOUND_EXPR)
3539 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3540 EXPAND_NORMAL);
3541 result = TREE_OPERAND (result, 1);
3543 return expand_expr (result, target, mode, EXPAND_NORMAL);
3546 /* Otherwise, call the normal function. */
3547 return NULL_RTX;
3550 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3551 NULL_RTX if we failed; the caller should emit a normal call. */
3553 static rtx
3554 expand_builtin_bcopy (tree exp, int ignore)
3556 tree type = TREE_TYPE (exp);
3557 tree src, dest, size;
3559 if (!validate_arglist (exp,
3560 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3561 return NULL_RTX;
3563 src = CALL_EXPR_ARG (exp, 0);
3564 dest = CALL_EXPR_ARG (exp, 1);
3565 size = CALL_EXPR_ARG (exp, 2);
3567 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3568 This is done this way so that if it isn't expanded inline, we fall
3569 back to calling bcopy instead of memmove. */
3570 return expand_builtin_memmove_args (dest, src,
3571 fold_convert (sizetype, size),
3572 type, const0_rtx, VOIDmode,
3573 ignore);
3576 #ifndef HAVE_movstr
3577 # define HAVE_movstr 0
3578 # define CODE_FOR_movstr CODE_FOR_nothing
3579 #endif
3581 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3582 we failed; the caller should emit a normal call, otherwise try to
3583 get the result in TARGET, if convenient. If ENDP is 0 return the
3584 destination pointer, if ENDP is 1 return the end pointer ala
3585 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3586 stpcpy. */
3588 static rtx
3589 expand_movstr (tree dest, tree src, rtx target, int endp)
3591 rtx end;
3592 rtx dest_mem;
3593 rtx src_mem;
3594 rtx insn;
3595 const struct insn_data * data;
3597 if (!HAVE_movstr)
3598 return NULL_RTX;
3600 dest_mem = get_memory_rtx (dest, NULL);
3601 src_mem = get_memory_rtx (src, NULL);
3602 if (!endp)
3604 target = force_reg (Pmode, XEXP (dest_mem, 0));
3605 dest_mem = replace_equiv_address (dest_mem, target);
3606 end = gen_reg_rtx (Pmode);
3608 else
3610 if (target == 0 || target == const0_rtx)
3612 end = gen_reg_rtx (Pmode);
3613 if (target == 0)
3614 target = end;
3616 else
3617 end = target;
3620 data = insn_data + CODE_FOR_movstr;
3622 if (data->operand[0].mode != VOIDmode)
3623 end = gen_lowpart (data->operand[0].mode, end);
3625 insn = data->genfun (end, dest_mem, src_mem);
3627 gcc_assert (insn);
3629 emit_insn (insn);
3631 /* movstr is supposed to set end to the address of the NUL
3632 terminator. If the caller requested a mempcpy-like return value,
3633 adjust it. */
3634 if (endp == 1 && target != const0_rtx)
3636 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3637 emit_move_insn (target, force_operand (tem, NULL_RTX));
3640 return target;
3643 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3644 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3645 try to get the result in TARGET, if convenient (and in mode MODE if that's
3646 convenient). */
3648 static rtx
3649 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3651 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3653 tree dest = CALL_EXPR_ARG (exp, 0);
3654 tree src = CALL_EXPR_ARG (exp, 1);
3655 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3657 return NULL_RTX;
3660 /* Helper function to do the actual work for expand_builtin_strcpy. The
3661 arguments to the builtin_strcpy call DEST and SRC are broken out
3662 so that this can also be called without constructing an actual CALL_EXPR.
3663 The other arguments and return value are the same as for
3664 expand_builtin_strcpy. */
3666 static rtx
3667 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3668 rtx target, enum machine_mode mode)
3670 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3671 if (result)
3672 return expand_expr (result, target, mode, EXPAND_NORMAL);
3673 return expand_movstr (dest, src, target, /*endp=*/0);
3677 /* Expand a call EXP to the stpcpy builtin.
3678 Return NULL_RTX if we failed; the caller should emit a normal call,
3679 otherwise try to get the result in TARGET, if convenient (and in
3680 mode MODE if that's convenient). */
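/* A sketch of the transformation done below when SRC is a known string:
   stpcpy (d, "abc") is expanded as mempcpy (d, "abc", 4) with ENDP == 2,
   so the value produced is d + 3, the address of the terminating NUL,
   which is exactly what stpcpy must return.  */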
3682 static rtx
3683 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3685 tree dst, src;
3687 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3688 return NULL_RTX;
3690 dst = CALL_EXPR_ARG (exp, 0);
3691 src = CALL_EXPR_ARG (exp, 1);
3693 /* If return value is ignored, transform stpcpy into strcpy. */
3694 if (target == const0_rtx)
3696 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3697 if (!fn)
3698 return NULL_RTX;
3700 return expand_expr (build_call_expr (fn, 2, dst, src),
3701 target, mode, EXPAND_NORMAL);
3703 else
3705 tree len, lenp1;
3706 rtx ret;
3708 /* Ensure we get an actual string whose length can be evaluated at
3709 compile-time, not an expression containing a string. This is
3710 because the latter will potentially produce pessimized code
3711 when used to produce the return value. */
3712 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3713 return expand_movstr (dst, src, target, /*endp=*/2);
3715 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3716 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3717 target, mode, /*endp=*/2);
3719 if (ret)
3720 return ret;
3722 if (TREE_CODE (len) == INTEGER_CST)
3724 rtx len_rtx = expand_normal (len);
3726 if (GET_CODE (len_rtx) == CONST_INT)
3728 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3729 dst, src, target, mode);
3731 if (ret)
3733 if (! target)
3735 if (mode != VOIDmode)
3736 target = gen_reg_rtx (mode);
3737 else
3738 target = gen_reg_rtx (GET_MODE (ret));
3740 if (GET_MODE (target) != GET_MODE (ret))
3741 ret = gen_lowpart (GET_MODE (target), ret);
3743 ret = plus_constant (ret, INTVAL (len_rtx));
3744 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3745 gcc_assert (ret);
3747 return target;
3752 return expand_movstr (dst, src, target, /*endp=*/2);
3756 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3757 bytes from constant string DATA + OFFSET and return it as target
3758 constant. */
3760 static rtx
3761 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3762 enum machine_mode mode)
3764 const char *str = (const char *) data;
3766 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3767 return const0_rtx;
3769 return c_readstr (str + offset, mode);
3772 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3773 NULL_RTX if we failed; the caller should emit a normal call. */
3775 static rtx
3776 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3778 tree fndecl = get_callee_fndecl (exp);
3780 if (validate_arglist (exp,
3781 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3783 tree dest = CALL_EXPR_ARG (exp, 0);
3784 tree src = CALL_EXPR_ARG (exp, 1);
3785 tree len = CALL_EXPR_ARG (exp, 2);
3786 tree slen = c_strlen (src, 1);
3787 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3789 if (result)
3791 while (TREE_CODE (result) == COMPOUND_EXPR)
3793 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3794 EXPAND_NORMAL);
3795 result = TREE_OPERAND (result, 1);
3797 return expand_expr (result, target, mode, EXPAND_NORMAL);
3800 /* We must be passed a constant len and src parameter. */
3801 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3802 return NULL_RTX;
3804 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3806 /* We're required to pad with trailing zeros if the requested
3807 len is greater than strlen(s2)+1. In that case try to
3808 use store_by_pieces; if that fails, punt. */
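/* For example, strncpy (d, "ab", 5) must store 'a', 'b' and then three
   NUL bytes; when the length and source string are known, the
   store_by_pieces path below emits exactly those five constant bytes.  */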
3809 if (tree_int_cst_lt (slen, len))
3811 unsigned int dest_align
3812 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3813 const char *p = c_getstr (src);
3814 rtx dest_mem;
3816 if (!p || dest_align == 0 || !host_integerp (len, 1)
3817 || !can_store_by_pieces (tree_low_cst (len, 1),
3818 builtin_strncpy_read_str,
3819 (void *) p, dest_align, false))
3820 return NULL_RTX;
3822 dest_mem = get_memory_rtx (dest, len);
3823 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3824 builtin_strncpy_read_str,
3825 (void *) p, dest_align, false, 0);
3826 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3827 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3828 return dest_mem;
3831 return NULL_RTX;
3834 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3835 bytes from constant string DATA + OFFSET and return it as target
3836 constant. */
3838 static rtx
3839 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3840 enum machine_mode mode)
3842 const char *c = (const char *) data;
3843 char *p = alloca (GET_MODE_SIZE (mode));
3845 memset (p, *c, GET_MODE_SIZE (mode));
3847 return c_readstr (p, mode);
3850 /* Callback routine for store_by_pieces. Return the RTL of a register
3851 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3852 char value given in the RTL register data. For example, if mode is
3853 4 bytes wide, return the RTL for 0x01010101*data. */
3855 static rtx
3856 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3857 enum machine_mode mode)
3859 rtx target, coeff;
3860 size_t size;
3861 char *p;
3863 size = GET_MODE_SIZE (mode);
3864 if (size == 1)
3865 return (rtx) data;
3867 p = alloca (size);
3868 memset (p, 1, size);
3869 coeff = c_readstr (p, mode);
3871 target = convert_to_mode (mode, (rtx) data, 1);
3872 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3873 return force_reg (mode, target);
3876 /* Expand expression EXP, which is a call to the memset builtin. Return
3877 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3878 try to get the result in TARGET, if convenient (and in mode MODE if that's
3879 convenient). */
3881 static rtx
3882 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3884 if (!validate_arglist (exp,
3885 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3886 return NULL_RTX;
3887 else
3889 tree dest = CALL_EXPR_ARG (exp, 0);
3890 tree val = CALL_EXPR_ARG (exp, 1);
3891 tree len = CALL_EXPR_ARG (exp, 2);
3892 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3896 /* Helper function to do the actual work for expand_builtin_memset. The
3897 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3898 so that this can also be called without constructing an actual CALL_EXPR.
3899 The other arguments and return value are the same as for
3900 expand_builtin_memset. */
3902 static rtx
3903 expand_builtin_memset_args (tree dest, tree val, tree len,
3904 rtx target, enum machine_mode mode, tree orig_exp)
3906 tree fndecl, fn;
3907 enum built_in_function fcode;
3908 char c;
3909 unsigned int dest_align;
3910 rtx dest_mem, dest_addr, len_rtx;
3911 HOST_WIDE_INT expected_size = -1;
3912 unsigned int expected_align = 0;
3914 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3916 /* If DEST is not a pointer type, don't do this operation in-line. */
3917 if (dest_align == 0)
3918 return NULL_RTX;
3920 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3921 if (expected_align < dest_align)
3922 expected_align = dest_align;
3924 /* If the LEN parameter is zero, return DEST. */
3925 if (integer_zerop (len))
3927 /* Evaluate and ignore VAL in case it has side-effects. */
3928 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3929 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3932 /* Stabilize the arguments in case we fail. */
3933 dest = builtin_save_expr (dest);
3934 val = builtin_save_expr (val);
3935 len = builtin_save_expr (len);
3937 len_rtx = expand_normal (len);
3938 dest_mem = get_memory_rtx (dest, len);
3940 if (TREE_CODE (val) != INTEGER_CST)
3942 rtx val_rtx;
3944 val_rtx = expand_normal (val);
3945 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3946 val_rtx, 0);
3948 /* Assume that we can memset by pieces if we can store
3949 * the coefficients by pieces (in the required modes).
3950 * We can't pass builtin_memset_gen_str as that emits RTL. */
3951 c = 1;
3952 if (host_integerp (len, 1)
3953 && can_store_by_pieces (tree_low_cst (len, 1),
3954 builtin_memset_read_str, &c, dest_align,
3955 true))
3957 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3958 val_rtx);
3959 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3960 builtin_memset_gen_str, val_rtx, dest_align,
3961 true, 0);
3963 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3964 dest_align, expected_align,
3965 expected_size))
3966 goto do_libcall;
3968 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3969 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3970 return dest_mem;
3973 if (target_char_cast (val, &c))
3974 goto do_libcall;
3976 if (c)
3978 if (host_integerp (len, 1)
3979 && can_store_by_pieces (tree_low_cst (len, 1),
3980 builtin_memset_read_str, &c, dest_align,
3981 true))
3982 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3983 builtin_memset_read_str, &c, dest_align, true, 0);
3984 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3985 dest_align, expected_align,
3986 expected_size))
3987 goto do_libcall;
3989 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3990 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3991 return dest_mem;
3994 set_mem_align (dest_mem, dest_align);
3995 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3996 CALL_EXPR_TAILCALL (orig_exp)
3997 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3998 expected_align, expected_size);
4000 if (dest_addr == 0)
4002 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4003 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4006 return dest_addr;
4008 do_libcall:
4009 fndecl = get_callee_fndecl (orig_exp);
4010 fcode = DECL_FUNCTION_CODE (fndecl);
4011 if (fcode == BUILT_IN_MEMSET)
4012 fn = build_call_expr (fndecl, 3, dest, val, len);
4013 else if (fcode == BUILT_IN_BZERO)
4014 fn = build_call_expr (fndecl, 2, dest, len);
4015 else
4016 gcc_unreachable ();
4017 if (TREE_CODE (fn) == CALL_EXPR)
4018 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4019 return expand_call (fn, target, target == const0_rtx);
4022 /* Expand expression EXP, which is a call to the bzero builtin. Return
4023 NULL_RTX if we failed; the caller should emit a normal call. */
4025 static rtx
4026 expand_builtin_bzero (tree exp)
4028 tree dest, size;
4030 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4031 return NULL_RTX;
4033 dest = CALL_EXPR_ARG (exp, 0);
4034 size = CALL_EXPR_ARG (exp, 1);
4036 /* New argument list transforming bzero(ptr x, int y) to
4037 memset(ptr x, int 0, size_t y). This is done this way
4038 so that if it isn't expanded inline, we fall back to
4039 calling bzero instead of memset. */
4041 return expand_builtin_memset_args (dest, integer_zero_node,
4042 fold_convert (sizetype, size),
4043 const0_rtx, VOIDmode, exp);
4046 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed; the
4047 caller should emit a normal call, otherwise try to get the result
4048 in TARGET, if convenient (and in mode MODE if that's convenient). */
4050 static rtx
4051 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4053 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4054 INTEGER_TYPE, VOID_TYPE))
4056 tree type = TREE_TYPE (exp);
4057 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4058 CALL_EXPR_ARG (exp, 1),
4059 CALL_EXPR_ARG (exp, 2), type);
4060 if (result)
4061 return expand_expr (result, target, mode, EXPAND_NORMAL);
4063 return NULL_RTX;
4066 /* Expand expression EXP, which is a call to the memcmp built-in function.
4067 Return NULL_RTX if we failed and the
4068 caller should emit a normal call, otherwise try to get the result in
4069 TARGET, if convenient (and in mode MODE, if that's convenient). */
4071 static rtx
4072 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4074 if (!validate_arglist (exp,
4075 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4076 return NULL_RTX;
4077 else
4079 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4080 CALL_EXPR_ARG (exp, 1),
4081 CALL_EXPR_ARG (exp, 2));
4082 if (result)
4083 return expand_expr (result, target, mode, EXPAND_NORMAL);
4086 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4088 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4089 rtx result;
4090 rtx insn;
4091 tree arg1 = CALL_EXPR_ARG (exp, 0);
4092 tree arg2 = CALL_EXPR_ARG (exp, 1);
4093 tree len = CALL_EXPR_ARG (exp, 2);
4095 int arg1_align
4096 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4097 int arg2_align
4098 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4099 enum machine_mode insn_mode;
4101 #ifdef HAVE_cmpmemsi
4102 if (HAVE_cmpmemsi)
4103 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4104 else
4105 #endif
4106 #ifdef HAVE_cmpstrnsi
4107 if (HAVE_cmpstrnsi)
4108 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4109 else
4110 #endif
4111 return NULL_RTX;
4113 /* If we don't have POINTER_TYPE, call the function. */
4114 if (arg1_align == 0 || arg2_align == 0)
4115 return NULL_RTX;
4117 /* Make a place to write the result of the instruction. */
4118 result = target;
4119 if (! (result != 0
4120 && REG_P (result) && GET_MODE (result) == insn_mode
4121 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4122 result = gen_reg_rtx (insn_mode);
4124 arg1_rtx = get_memory_rtx (arg1, len);
4125 arg2_rtx = get_memory_rtx (arg2, len);
4126 arg3_rtx = expand_normal (len);
4128 /* Set MEM_SIZE as appropriate. */
4129 if (GET_CODE (arg3_rtx) == CONST_INT)
4131 set_mem_size (arg1_rtx, arg3_rtx);
4132 set_mem_size (arg2_rtx, arg3_rtx);
4135 #ifdef HAVE_cmpmemsi
4136 if (HAVE_cmpmemsi)
4137 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4138 GEN_INT (MIN (arg1_align, arg2_align)));
4139 else
4140 #endif
4141 #ifdef HAVE_cmpstrnsi
4142 if (HAVE_cmpstrnsi)
4143 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4144 GEN_INT (MIN (arg1_align, arg2_align)));
4145 else
4146 #endif
4147 gcc_unreachable ();
4149 if (insn)
4150 emit_insn (insn);
4151 else
4152 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
4153 TYPE_MODE (integer_type_node), 3,
4154 XEXP (arg1_rtx, 0), Pmode,
4155 XEXP (arg2_rtx, 0), Pmode,
4156 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4157 TYPE_UNSIGNED (sizetype)),
4158 TYPE_MODE (sizetype));
4160 /* Return the value in the proper mode for this function. */
4161 mode = TYPE_MODE (TREE_TYPE (exp));
4162 if (GET_MODE (result) == mode)
4163 return result;
4164 else if (target != 0)
4166 convert_move (target, result, 0);
4167 return target;
4169 else
4170 return convert_to_mode (mode, result, 0);
4172 #endif
4174 return NULL_RTX;
4177 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4178 if we failed; the caller should emit a normal call, otherwise try to get
4179 the result in TARGET, if convenient. */
4181 static rtx
4182 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4184 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4185 return NULL_RTX;
4186 else
4188 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4189 CALL_EXPR_ARG (exp, 1));
4190 if (result)
4191 return expand_expr (result, target, mode, EXPAND_NORMAL);
4194 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4195 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4196 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4198 rtx arg1_rtx, arg2_rtx;
4199 rtx result, insn = NULL_RTX;
4200 tree fndecl, fn;
4201 tree arg1 = CALL_EXPR_ARG (exp, 0);
4202 tree arg2 = CALL_EXPR_ARG (exp, 1);
4204 int arg1_align
4205 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4206 int arg2_align
4207 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4209 /* If we don't have POINTER_TYPE, call the function. */
4210 if (arg1_align == 0 || arg2_align == 0)
4211 return NULL_RTX;
4213 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4214 arg1 = builtin_save_expr (arg1);
4215 arg2 = builtin_save_expr (arg2);
4217 arg1_rtx = get_memory_rtx (arg1, NULL);
4218 arg2_rtx = get_memory_rtx (arg2, NULL);
4220 #ifdef HAVE_cmpstrsi
4221 /* Try to call cmpstrsi. */
4222 if (HAVE_cmpstrsi)
4224 enum machine_mode insn_mode
4225 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4227 /* Make a place to write the result of the instruction. */
4228 result = target;
4229 if (! (result != 0
4230 && REG_P (result) && GET_MODE (result) == insn_mode
4231 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4232 result = gen_reg_rtx (insn_mode);
4234 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4235 GEN_INT (MIN (arg1_align, arg2_align)));
4237 #endif
4238 #ifdef HAVE_cmpstrnsi
4239 /* Try to determine at least one length and call cmpstrnsi. */
4240 if (!insn && HAVE_cmpstrnsi)
4242 tree len;
4243 rtx arg3_rtx;
4245 enum machine_mode insn_mode
4246 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4247 tree len1 = c_strlen (arg1, 1);
4248 tree len2 = c_strlen (arg2, 1);
4250 if (len1)
4251 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4252 if (len2)
4253 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4255 /* If we don't have a constant length for the first, use the length
4256 of the second, if we know it. We don't require a constant for
4257 this case; some cost analysis could be done if both are available
4258 but neither is constant. For now, assume they're equally cheap,
4259 unless one has side effects. If both strings have constant lengths,
4260 use the smaller. */
4262 if (!len1)
4263 len = len2;
4264 else if (!len2)
4265 len = len1;
4266 else if (TREE_SIDE_EFFECTS (len1))
4267 len = len2;
4268 else if (TREE_SIDE_EFFECTS (len2))
4269 len = len1;
4270 else if (TREE_CODE (len1) != INTEGER_CST)
4271 len = len2;
4272 else if (TREE_CODE (len2) != INTEGER_CST)
4273 len = len1;
4274 else if (tree_int_cst_lt (len1, len2))
4275 len = len1;
4276 else
4277 len = len2;
4279 /* If both arguments have side effects, we cannot optimize. */
4280 if (!len || TREE_SIDE_EFFECTS (len))
4281 goto do_libcall;
4283 arg3_rtx = expand_normal (len);
4285 /* Make a place to write the result of the instruction. */
4286 result = target;
4287 if (! (result != 0
4288 && REG_P (result) && GET_MODE (result) == insn_mode
4289 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4290 result = gen_reg_rtx (insn_mode);
4292 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4293 GEN_INT (MIN (arg1_align, arg2_align)));
4295 #endif
4297 if (insn)
4299 emit_insn (insn);
4301 /* Return the value in the proper mode for this function. */
4302 mode = TYPE_MODE (TREE_TYPE (exp));
4303 if (GET_MODE (result) == mode)
4304 return result;
4305 if (target == 0)
4306 return convert_to_mode (mode, result, 0);
4307 convert_move (target, result, 0);
4308 return target;
4311 /* Expand the library call ourselves using a stabilized argument
4312 list to avoid re-evaluating the function's arguments twice. */
4313 #ifdef HAVE_cmpstrnsi
4314 do_libcall:
4315 #endif
4316 fndecl = get_callee_fndecl (exp);
4317 fn = build_call_expr (fndecl, 2, arg1, arg2);
4318 if (TREE_CODE (fn) == CALL_EXPR)
4319 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4320 return expand_call (fn, target, target == const0_rtx);
4322 #endif
4323 return NULL_RTX;
4326 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4327 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4328 the result in TARGET, if convenient. */
4330 static rtx
4331 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4333 if (!validate_arglist (exp,
4334 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4335 return NULL_RTX;
4336 else
4338 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4339 CALL_EXPR_ARG (exp, 1),
4340 CALL_EXPR_ARG (exp, 2));
4341 if (result)
4342 return expand_expr (result, target, mode, EXPAND_NORMAL);
4345 /* If c_strlen can determine an expression for one of the string
4346 lengths, and it doesn't have side effects, then emit cmpstrnsi
4347 using length MIN(strlen(string)+1, arg3). */
4348 #ifdef HAVE_cmpstrnsi
4349 if (HAVE_cmpstrnsi)
4351 tree len, len1, len2;
4352 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4353 rtx result, insn;
4354 tree fndecl, fn;
4355 tree arg1 = CALL_EXPR_ARG (exp, 0);
4356 tree arg2 = CALL_EXPR_ARG (exp, 1);
4357 tree arg3 = CALL_EXPR_ARG (exp, 2);
4359 int arg1_align
4360 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4361 int arg2_align
4362 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4363 enum machine_mode insn_mode
4364 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4366 len1 = c_strlen (arg1, 1);
4367 len2 = c_strlen (arg2, 1);
4369 if (len1)
4370 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4371 if (len2)
4372 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4374 /* If we don't have a constant length for the first, use the length
4375 of the second, if we know it. We don't require a constant for
4376 this case; some cost analysis could be done if both are available
4377 but neither is constant. For now, assume they're equally cheap,
4378 unless one has side effects. If both strings have constant lengths,
4379 use the smaller. */
4381 if (!len1)
4382 len = len2;
4383 else if (!len2)
4384 len = len1;
4385 else if (TREE_SIDE_EFFECTS (len1))
4386 len = len2;
4387 else if (TREE_SIDE_EFFECTS (len2))
4388 len = len1;
4389 else if (TREE_CODE (len1) != INTEGER_CST)
4390 len = len2;
4391 else if (TREE_CODE (len2) != INTEGER_CST)
4392 len = len1;
4393 else if (tree_int_cst_lt (len1, len2))
4394 len = len1;
4395 else
4396 len = len2;
4398 /* If both arguments have side effects, we cannot optimize. */
4399 if (!len || TREE_SIDE_EFFECTS (len))
4400 return NULL_RTX;
4402 /* The actual new length parameter is MIN(len,arg3). */
4403 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4404 fold_convert (TREE_TYPE (len), arg3));
4406 /* If we don't have POINTER_TYPE, call the function. */
4407 if (arg1_align == 0 || arg2_align == 0)
4408 return NULL_RTX;
4410 /* Make a place to write the result of the instruction. */
4411 result = target;
4412 if (! (result != 0
4413 && REG_P (result) && GET_MODE (result) == insn_mode
4414 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4415 result = gen_reg_rtx (insn_mode);
4417 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4418 arg1 = builtin_save_expr (arg1);
4419 arg2 = builtin_save_expr (arg2);
4420 len = builtin_save_expr (len);
4422 arg1_rtx = get_memory_rtx (arg1, len);
4423 arg2_rtx = get_memory_rtx (arg2, len);
4424 arg3_rtx = expand_normal (len);
4425 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4426 GEN_INT (MIN (arg1_align, arg2_align)));
4427 if (insn)
4429 emit_insn (insn);
4431 /* Return the value in the proper mode for this function. */
4432 mode = TYPE_MODE (TREE_TYPE (exp));
4433 if (GET_MODE (result) == mode)
4434 return result;
4435 if (target == 0)
4436 return convert_to_mode (mode, result, 0);
4437 convert_move (target, result, 0);
4438 return target;
4441 /* Expand the library call ourselves using a stabilized argument
4442 list to avoid re-evaluating the function's arguments twice. */
4443 fndecl = get_callee_fndecl (exp);
4444 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4445 if (TREE_CODE (fn) == CALL_EXPR)
4446 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4447 return expand_call (fn, target, target == const0_rtx);
4449 #endif
4450 return NULL_RTX;
4453 /* Expand expression EXP, which is a call to the strcat builtin.
4454 Return NULL_RTX if we failed; the caller should emit a normal call,
4455 otherwise try to get the result in TARGET, if convenient. */
4457 static rtx
4458 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4460 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4461 return NULL_RTX;
4462 else
4464 tree dst = CALL_EXPR_ARG (exp, 0);
4465 tree src = CALL_EXPR_ARG (exp, 1);
4466 const char *p = c_getstr (src);
4468 /* If the string length is zero, return the dst parameter. */
4469 if (p && *p == '\0')
4470 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4472 if (!optimize_size)
4474 /* See if we can store by pieces into (dst + strlen(dst)). */
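/* That is, strcat (dst, src) is expanded roughly as
   strcpy (dst + strlen (dst), src), with the original DST used as the
   return value.  */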
4475 tree newsrc, newdst,
4476 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4477 rtx insns;
4479 /* Stabilize the argument list. */
4480 newsrc = builtin_save_expr (src);
4481 dst = builtin_save_expr (dst);
4483 start_sequence ();
4485 /* Create strlen (dst). */
4486 newdst = build_call_expr (strlen_fn, 1, dst);
4487 /* Create (dst p+ strlen (dst)). */
4489 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4490 newdst = builtin_save_expr (newdst);
4492 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4494 end_sequence (); /* Stop sequence. */
4495 return NULL_RTX;
4498 /* Output the entire sequence. */
4499 insns = get_insns ();
4500 end_sequence ();
4501 emit_insn (insns);
4503 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4506 return NULL_RTX;
4510 /* Expand expression EXP, which is a call to the strncat builtin.
4511 Return NULL_RTX if we failed; the caller should emit a normal call,
4512 otherwise try to get the result in TARGET, if convenient. */
4514 static rtx
4515 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4517 if (validate_arglist (exp,
4518 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4520 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4521 CALL_EXPR_ARG (exp, 1),
4522 CALL_EXPR_ARG (exp, 2));
4523 if (result)
4524 return expand_expr (result, target, mode, EXPAND_NORMAL);
4526 return NULL_RTX;
4529 /* Expand expression EXP, which is a call to the strspn builtin.
4530 Return NULL_RTX if we failed; the caller should emit a normal call,
4531 otherwise try to get the result in TARGET, if convenient. */
4533 static rtx
4534 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4536 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4538 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4539 CALL_EXPR_ARG (exp, 1));
4540 if (result)
4541 return expand_expr (result, target, mode, EXPAND_NORMAL);
4543 return NULL_RTX;
4546 /* Expand expression EXP, which is a call to the strcspn builtin.
4547 Return NULL_RTX if we failed; the caller should emit a normal call,
4548 otherwise try to get the result in TARGET, if convenient. */
4550 static rtx
4551 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4553 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4555 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4556 CALL_EXPR_ARG (exp, 1));
4557 if (result)
4558 return expand_expr (result, target, mode, EXPAND_NORMAL);
4560 return NULL_RTX;
4563 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4564 if that's convenient. */
4566 rtx
4567 expand_builtin_saveregs (void)
4569 rtx val, seq;
4571 /* Don't do __builtin_saveregs more than once in a function.
4572 Save the result of the first call and reuse it. */
4573 if (saveregs_value != 0)
4574 return saveregs_value;
4576 /* When this function is called, it means that registers must be
4577 saved on entry to this function. So we migrate the call to the
4578 first insn of this function. */
4580 start_sequence ();
4582 /* Do whatever the machine needs done in this case. */
4583 val = targetm.calls.expand_builtin_saveregs ();
4585 seq = get_insns ();
4586 end_sequence ();
4588 saveregs_value = val;
4590 /* Put the insns after the NOTE that starts the function. If this
4591 is inside a start_sequence, make the outer-level insn chain current, so
4592 the code is placed at the start of the function. */
4593 push_topmost_sequence ();
4594 emit_insn_after (seq, entry_of_function ());
4595 pop_topmost_sequence ();
4597 return val;
4600 /* __builtin_args_info (N) returns word N of the arg space info
4601 for the current function. The number and meanings of words
4602 is controlled by the definition of CUMULATIVE_ARGS. */
4604 static rtx
4605 expand_builtin_args_info (tree exp)
4607 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4608 int *word_ptr = (int *) &crtl->args.info;
4610 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4612 if (call_expr_nargs (exp) != 0)
4614 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4615 error ("argument of %<__builtin_args_info%> must be constant");
4616 else
4618 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4620 if (wordnum < 0 || wordnum >= nwords)
4621 error ("argument of %<__builtin_args_info%> out of range");
4622 else
4623 return GEN_INT (word_ptr[wordnum]);
4626 else
4627 error ("missing argument in %<__builtin_args_info%>");
4629 return const0_rtx;
4632 /* Expand a call to __builtin_next_arg. */
4634 static rtx
4635 expand_builtin_next_arg (void)
4637 /* Checking arguments is already done in fold_builtin_next_arg
4638 that must be called before this function. */
4639 return expand_binop (ptr_mode, add_optab,
4640 crtl->args.internal_arg_pointer,
4641 crtl->args.arg_offset_rtx,
4642 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4645 /* Make it easier for the backends by protecting the valist argument
4646 from multiple evaluations. */
4648 static tree
4649 stabilize_va_list (tree valist, int needs_lvalue)
4651 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4653 if (TREE_SIDE_EFFECTS (valist))
4654 valist = save_expr (valist);
4656 /* For this case, the backends will be expecting a pointer to
4657 TREE_TYPE (va_list_type_node), but it's possible we've
4658 actually been given an array (an actual va_list_type_node).
4659 So fix it. */
4660 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4662 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4663 valist = build_fold_addr_expr_with_type (valist, p1);
4666 else
4668 tree pt;
4670 if (! needs_lvalue)
4672 if (! TREE_SIDE_EFFECTS (valist))
4673 return valist;
4675 pt = build_pointer_type (va_list_type_node);
4676 valist = fold_build1 (ADDR_EXPR, pt, valist);
4677 TREE_SIDE_EFFECTS (valist) = 1;
4680 if (TREE_SIDE_EFFECTS (valist))
4681 valist = save_expr (valist);
4682 valist = build_fold_indirect_ref (valist);
4685 return valist;
4688 /* The "standard" definition of va_list is void*. */
4690 tree
4691 std_build_builtin_va_list (void)
4693 return ptr_type_node;
4696 /* The "standard" implementation of va_start: just assign `nextarg' to
4697 the variable. */
4699 void
4700 std_expand_builtin_va_start (tree valist, rtx nextarg)
4702 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4703 convert_move (va_r, nextarg, 0);
4706 /* Expand EXP, a call to __builtin_va_start. */
4708 static rtx
4709 expand_builtin_va_start (tree exp)
4711 rtx nextarg;
4712 tree valist;
4714 if (call_expr_nargs (exp) < 2)
4716 error ("too few arguments to function %<va_start%>");
4717 return const0_rtx;
4720 if (fold_builtin_next_arg (exp, true))
4721 return const0_rtx;
4723 nextarg = expand_builtin_next_arg ();
4724 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4726 if (targetm.expand_builtin_va_start)
4727 targetm.expand_builtin_va_start (valist, nextarg);
4728 else
4729 std_expand_builtin_va_start (valist, nextarg);
4731 return const0_rtx;
4734 /* The "standard" implementation of va_arg: read the value from the
4735 current (padded) address and increment by the (padded) size. */
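/* Ignoring the dynamic-alignment and padding adjustments handled below,
   the gimplified sequence is roughly:

       addr = ap;
       ap = ap + round_up (sizeof (TYPE), PARM_BOUNDARY / BITS_PER_UNIT);
       result = *(TYPE *) addr;

   with one extra indirection when TYPE is passed by reference.  */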
4737 tree
4738 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4740 tree addr, t, type_size, rounded_size, valist_tmp;
4741 unsigned HOST_WIDE_INT align, boundary;
4742 bool indirect;
4744 #ifdef ARGS_GROW_DOWNWARD
4745 /* All of the alignment and movement below is for args-grow-up machines.
4746 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4747 implement their own specialized gimplify_va_arg_expr routines. */
4748 gcc_unreachable ();
4749 #endif
4751 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4752 if (indirect)
4753 type = build_pointer_type (type);
4755 align = PARM_BOUNDARY / BITS_PER_UNIT;
4756 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4758 /* Hoist the valist value into a temporary for the moment. */
4759 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4761 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4762 requires greater alignment, we must perform dynamic alignment. */
4763 if (boundary > align
4764 && !integer_zerop (TYPE_SIZE (type)))
4766 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4767 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4768 valist_tmp, size_int (boundary - 1)));
4769 gimplify_and_add (t, pre_p);
4771 t = fold_convert (sizetype, valist_tmp);
4772 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4773 fold_convert (TREE_TYPE (valist),
4774 fold_build2 (BIT_AND_EXPR, sizetype, t,
4775 size_int (-boundary))));
4776 gimplify_and_add (t, pre_p);
4778 else
4779 boundary = align;
4781 /* If the actual alignment is less than the alignment of the type,
4782 adjust the type accordingly so that we don't assume strict alignment
4783 when dereferencing the pointer. */
4784 boundary *= BITS_PER_UNIT;
4785 if (boundary < TYPE_ALIGN (type))
4787 type = build_variant_type_copy (type);
4788 TYPE_ALIGN (type) = boundary;
4791 /* Compute the rounded size of the type. */
4792 type_size = size_in_bytes (type);
4793 rounded_size = round_up (type_size, align);
4795 /* Reduce rounded_size so it's sharable with the postqueue. */
4796 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4798 /* Get AP. */
4799 addr = valist_tmp;
4800 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4802 /* Small args are padded downward. */
4803 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4804 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4805 size_binop (MINUS_EXPR, rounded_size, type_size));
4806 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4809 /* Compute new value for AP. */
4810 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4811 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4812 gimplify_and_add (t, pre_p);
4814 addr = fold_convert (build_pointer_type (type), addr);
4816 if (indirect)
4817 addr = build_va_arg_indirect_ref (addr);
4819 return build_va_arg_indirect_ref (addr);
4822 /* Build an indirect-ref expression over the given TREE, which represents a
4823 piece of a va_arg() expansion. */
4824 tree
4825 build_va_arg_indirect_ref (tree addr)
4827 addr = build_fold_indirect_ref (addr);
4829 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4830 mf_mark (addr);
4832 return addr;
4835 /* Return a dummy expression of type TYPE in order to keep going after an
4836 error. */
4838 static tree
4839 dummy_object (tree type)
4841 tree t = build_int_cst (build_pointer_type (type), 0);
4842 return build1 (INDIRECT_REF, type, t);
4845 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4846 builtin function, but a very special sort of operator. */
4848 enum gimplify_status
4849 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4851 tree promoted_type, want_va_type, have_va_type;
4852 tree valist = TREE_OPERAND (*expr_p, 0);
4853 tree type = TREE_TYPE (*expr_p);
4854 tree t;
4856 /* Verify that valist is of the proper type. */
4857 want_va_type = va_list_type_node;
4858 have_va_type = TREE_TYPE (valist);
4860 if (have_va_type == error_mark_node)
4861 return GS_ERROR;
4863 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4865 /* If va_list is an array type, the argument may have decayed
4866 to a pointer type, e.g. by being passed to another function.
4867 In that case, unwrap both types so that we can compare the
4868 underlying records. */
4869 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4870 || POINTER_TYPE_P (have_va_type))
4872 want_va_type = TREE_TYPE (want_va_type);
4873 have_va_type = TREE_TYPE (have_va_type);
4877 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4879 error ("first argument to %<va_arg%> not of type %<va_list%>");
4880 return GS_ERROR;
4883 /* Generate a diagnostic for requesting data of a type that cannot
4884 be passed through `...' due to type promotion at the call site. */
4885 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4886 != type)
4888 static bool gave_help;
4890 /* Unfortunately, this is merely undefined, rather than a constraint
4891 violation, so we cannot make this an error. If this call is never
4892 executed, the program is still strictly conforming. */
4893 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4894 type, promoted_type);
4895 if (! gave_help)
4897 gave_help = true;
4898 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4899 promoted_type, type);
4902 /* We can, however, treat "undefined" any way we please.
4903 Call abort to encourage the user to fix the program. */
4904 inform ("if this code is reached, the program will abort");
4905 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4906 append_to_statement_list (t, pre_p);
4908 /* This is dead code, but go ahead and finish so that the
4909 mode of the result comes out right. */
4910 *expr_p = dummy_object (type);
4911 return GS_ALL_DONE;
4913 else
4915 /* Make it easier for the backends by protecting the valist argument
4916 from multiple evaluations. */
4917 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4919 /* For this case, the backends will be expecting a pointer to
4920 TREE_TYPE (va_list_type_node), but it's possible we've
4921 actually been given an array (an actual va_list_type_node).
4922 So fix it. */
4923 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4925 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4926 valist = build_fold_addr_expr_with_type (valist, p1);
4928 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4930 else
4931 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4933 if (!targetm.gimplify_va_arg_expr)
4934 /* FIXME: Once most targets are converted we should merely
4935 assert this is non-null. */
4936 return GS_ALL_DONE;
4938 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4939 return GS_OK;
4943 /* Expand EXP, a call to __builtin_va_end. */
4945 static rtx
4946 expand_builtin_va_end (tree exp)
4948 tree valist = CALL_EXPR_ARG (exp, 0);
4950 /* Evaluate for side effects, if needed. I hate macros that don't
4951 do that. */
4952 if (TREE_SIDE_EFFECTS (valist))
4953 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4955 return const0_rtx;
4958 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4959 builtin rather than just as an assignment in stdarg.h because of the
4960 nastiness of array-type va_list types. */
4962 static rtx
4963 expand_builtin_va_copy (tree exp)
4965 tree dst, src, t;
4967 dst = CALL_EXPR_ARG (exp, 0);
4968 src = CALL_EXPR_ARG (exp, 1);
4970 dst = stabilize_va_list (dst, 1);
4971 src = stabilize_va_list (src, 0);
4973 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4975 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4976 TREE_SIDE_EFFECTS (t) = 1;
4977 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4979 else
4981 rtx dstb, srcb, size;
4983 /* Evaluate to pointers. */
4984 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4985 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4986 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4987 VOIDmode, EXPAND_NORMAL);
4989 dstb = convert_memory_address (Pmode, dstb);
4990 srcb = convert_memory_address (Pmode, srcb);
4992 /* "Dereference" to BLKmode memories. */
4993 dstb = gen_rtx_MEM (BLKmode, dstb);
4994 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4995 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4996 srcb = gen_rtx_MEM (BLKmode, srcb);
4997 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4998 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
5000 /* Copy. */
5001 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5004 return const0_rtx;
5007 /* Expand a call to one of the builtin functions __builtin_frame_address or
5008 __builtin_return_address. */
5010 static rtx
5011 expand_builtin_frame_address (tree fndecl, tree exp)
5013 /* The argument must be a nonnegative integer constant.
5014 It counts the number of frames to scan up the stack.
5015 The value is the return address saved in that frame. */
5016 if (call_expr_nargs (exp) == 0)
5017 /* Warning about missing arg was already issued. */
5018 return const0_rtx;
5019 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5021 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5022 error ("invalid argument to %<__builtin_frame_address%>");
5023 else
5024 error ("invalid argument to %<__builtin_return_address%>");
5025 return const0_rtx;
5027 else
5029 rtx tem
5030 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5031 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5033 /* Some ports cannot access arbitrary stack frames. */
5034 if (tem == NULL)
5036 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5037 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5038 else
5039 warning (0, "unsupported argument to %<__builtin_return_address%>");
5040 return const0_rtx;
5043 /* For __builtin_frame_address, return what we've got. */
5044 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5045 return tem;
5047 if (!REG_P (tem)
5048 && ! CONSTANT_P (tem))
5049 tem = copy_to_mode_reg (Pmode, tem);
5050 return tem;
5054 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5055 we failed and the caller should emit a normal call, otherwise try to get
5056 the result in TARGET, if convenient. */
5058 static rtx
5059 expand_builtin_alloca (tree exp, rtx target)
5061 rtx op0;
5062 rtx result;
5064 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5065 should always expand to function calls. These can be intercepted
5066 in libmudflap. */
5067 if (flag_mudflap)
5068 return NULL_RTX;
5070 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5071 return NULL_RTX;
5073 /* Compute the argument. */
5074 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5076 /* Allocate the desired space. */
5077 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5078 result = convert_memory_address (ptr_mode, result);
5080 return result;
5083 /* Expand EXP, a call to a bswap builtin. Return NULL_RTX if a normal call
5084 should be emitted; otherwise place the result in TARGET if convenient. */
5086 static rtx
5087 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5089 enum machine_mode mode;
5090 tree arg;
5091 rtx op0;
5093 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5094 return NULL_RTX;
5096 arg = CALL_EXPR_ARG (exp, 0);
5097 mode = TYPE_MODE (TREE_TYPE (arg));
5098 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5100 target = expand_unop (mode, bswap_optab, op0, target, 1);
5102 gcc_assert (target);
5104 return convert_to_mode (mode, target, 0);
5107 /* Expand a call to a unary builtin in EXP.
5108 Return NULL_RTX if a normal call should be emitted rather than expanding the
5109 function in-line. If convenient, the result should be placed in TARGET.
5110 SUBTARGET may be used as the target for computing one of EXP's operands. */
5112 static rtx
5113 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5114 rtx subtarget, optab op_optab)
5116 rtx op0;
5118 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5119 return NULL_RTX;
5121 /* Compute the argument. */
5122 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5123 VOIDmode, EXPAND_NORMAL);
5124 /* Compute op, into TARGET if possible.
5125 Set TARGET to wherever the result comes back. */
5126 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5127 op_optab, op0, target, 1);
5128 gcc_assert (target);
5130 return convert_to_mode (target_mode, target, 0);
5133 /* If the string passed to fputs is a constant and is one character
5134 long, we attempt to transform this call into __builtin_fputc(). */
5136 static rtx
5137 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5139 /* Verify the arguments in the original call. */
5140 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5142 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5143 CALL_EXPR_ARG (exp, 1),
5144 (target == const0_rtx),
5145 unlocked, NULL_TREE);
5146 if (result)
5147 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5149 return NULL_RTX;
5152 /* Expand a call to __builtin_expect. We just return our argument
5153 as the builtin_expect semantics should already have been handled by
5154 the tree branch prediction pass. */
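/* For example, __builtin_expect (x, 1) expands to just the value of x
   here; the probability hint was consumed earlier by tree-level branch
   prediction.  */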
5156 static rtx
5157 expand_builtin_expect (tree exp, rtx target)
5159 tree arg, c;
5161 if (call_expr_nargs (exp) < 2)
5162 return const0_rtx;
5163 arg = CALL_EXPR_ARG (exp, 0);
5164 c = CALL_EXPR_ARG (exp, 1);
5166 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5167 /* When guessing was done, the hints should be already stripped away. */
5168 gcc_assert (!flag_guess_branch_prob
5169 || optimize == 0 || errorcount || sorrycount);
5170 return target;
5173 void
5174 expand_builtin_trap (void)
5176 #ifdef HAVE_trap
5177 if (HAVE_trap)
5178 emit_insn (gen_trap ());
5179 else
5180 #endif
5181 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5182 emit_barrier ();
5185 /* Expand EXP, a call to fabs, fabsf or fabsl.
5186 Return NULL_RTX if a normal call should be emitted rather than expanding
5187 the function inline. If convenient, the result should be placed
5188 in TARGET. SUBTARGET may be used as the target for computing
5189 the operand. */
5191 static rtx
5192 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5194 enum machine_mode mode;
5195 tree arg;
5196 rtx op0;
5198 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5199 return NULL_RTX;
5201 arg = CALL_EXPR_ARG (exp, 0);
5202 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5203 mode = TYPE_MODE (TREE_TYPE (arg));
5204 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5205 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5208 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5209 Return NULL_RTX if a normal call should be emitted rather than expanding the
5210 function inline. If convenient, the result should be placed in TARGET.
5211 SUBTARGET may be used as the target for computing the operand. */
5213 static rtx
5214 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5216 rtx op0, op1;
5217 tree arg;
5219 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5220 return NULL_RTX;
5222 arg = CALL_EXPR_ARG (exp, 0);
5223 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5225 arg = CALL_EXPR_ARG (exp, 1);
5226 op1 = expand_normal (arg);
5228 return expand_copysign (op0, op1, target);
5231 /* Create a new constant string literal and return a char* pointer to it.
5232 The STRING_CST value is the LEN characters at STR. */
5233 tree
5234 build_string_literal (int len, const char *str)
5236 tree t, elem, index, type;
5238 t = build_string (len, str);
5239 elem = build_type_variant (char_type_node, 1, 0);
5240 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5241 type = build_array_type (elem, index);
5242 TREE_TYPE (t) = type;
5243 TREE_CONSTANT (t) = 1;
5244 TREE_READONLY (t) = 1;
5245 TREE_STATIC (t) = 1;
5247 type = build_pointer_type (type);
5248 t = build1 (ADDR_EXPR, type, t);
5250 type = build_pointer_type (elem);
5251 t = build1 (NOP_EXPR, type, t);
5252 return t;
5255 /* Expand EXP, a call to printf or printf_unlocked.
5256 Return NULL_RTX if a normal call should be emitted rather than transforming
5257 the function inline. If convenient, the result should be placed in
5258 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5259 call. */
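/* The cases handled below, sketched on typical calls:
       printf ("%s\n", s)  -> puts (s)
       printf ("%c", c)    -> putchar (c)
       printf ("abc\n")    -> puts ("abc")
       printf ("x")        -> putchar ('x')
       printf ("")         -> no code at all
   Anything else is left for a normal printf call.  */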
5260 static rtx
5261 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5262 bool unlocked)
5264 /* If we're using an unlocked function, assume the other unlocked
5265 functions exist explicitly. */
5266 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5267 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5268 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5269 : implicit_built_in_decls[BUILT_IN_PUTS];
5270 const char *fmt_str;
5271 tree fn = 0;
5272 tree fmt, arg;
5273 int nargs = call_expr_nargs (exp);
5275 /* If the return value is used, don't do the transformation. */
5276 if (target != const0_rtx)
5277 return NULL_RTX;
5279 /* Verify the required arguments in the original call. */
5280 if (nargs == 0)
5281 return NULL_RTX;
5282 fmt = CALL_EXPR_ARG (exp, 0);
5283 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5284 return NULL_RTX;
5286 /* Check whether the format is a literal string constant. */
5287 fmt_str = c_getstr (fmt);
5288 if (fmt_str == NULL)
5289 return NULL_RTX;
5291 if (!init_target_chars ())
5292 return NULL_RTX;
5294 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5295 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5297 if ((nargs != 2)
5298 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5299 return NULL_RTX;
5300 if (fn_puts)
5301 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5303 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5304 else if (strcmp (fmt_str, target_percent_c) == 0)
5306 if ((nargs != 2)
5307 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5308 return NULL_RTX;
5309 if (fn_putchar)
5310 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5312 else
5314 /* We can't handle anything else with % args or %% ... yet. */
5315 if (strchr (fmt_str, target_percent))
5316 return NULL_RTX;
5318 if (nargs > 1)
5319 return NULL_RTX;
5321 /* If the format specifier was "", printf does nothing. */
5322 if (fmt_str[0] == '\0')
5323 return const0_rtx;
5324 /* If the format specifier has length of 1, call putchar. */
5325 if (fmt_str[1] == '\0')
5327 /* Given printf("c") (where c is any single character),
5328 convert "c"[0] to an int and pass that to the replacement
5329 function. */
5330 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5331 if (fn_putchar)
5332 fn = build_call_expr (fn_putchar, 1, arg);
5334 else
5336 /* If the format specifier was "string\n", call puts("string"). */
5337 size_t len = strlen (fmt_str);
5338 if ((unsigned char)fmt_str[len - 1] == target_newline)
5340 /* Create a NUL-terminated string that's one char shorter
5341 than the original, stripping off the trailing '\n'. */
5342 char *newstr = alloca (len);
5343 memcpy (newstr, fmt_str, len - 1);
5344 newstr[len - 1] = 0;
5345 arg = build_string_literal (len, newstr);
5346 if (fn_puts)
5347 fn = build_call_expr (fn_puts, 1, arg);
5349 else
5350 /* We'd like to arrange to call fputs(string,stdout) here,
5351 but we need stdout and don't have a way to get it yet. */
5352 return NULL_RTX;
5356 if (!fn)
5357 return NULL_RTX;
5358 if (TREE_CODE (fn) == CALL_EXPR)
5359 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5360 return expand_expr (fn, target, mode, EXPAND_NORMAL);
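/* Summary of the transformations performed above, illustrative only and
   applying solely when the call's result is unused:

       printf ("%s\n", s)   ==> puts (s)
       printf ("%c", c)     ==> putchar (c)
       printf ("x")         ==> putchar ('x')
       printf ("hello\n")   ==> puts ("hello")
       printf ("")          ==> no code at all

   Any other format containing '%' is left as a normal library call.  */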
5363 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5364 Return NULL_RTX if a normal call should be emitted rather than transforming
5365 the function inline. If convenient, the result should be placed in
5366 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5367 call. */
5368 static rtx
5369 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5370 bool unlocked)
5372 /* If we're using an unlocked function, assume the other unlocked
5373 functions exist explicitly. */
5374 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5375 : implicit_built_in_decls[BUILT_IN_FPUTC];
5376 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5377 : implicit_built_in_decls[BUILT_IN_FPUTS];
5378 const char *fmt_str;
5379 tree fn = 0;
5380 tree fmt, fp, arg;
5381 int nargs = call_expr_nargs (exp);
5383 /* If the return value is used, don't do the transformation. */
5384 if (target != const0_rtx)
5385 return NULL_RTX;
5387 /* Verify the required arguments in the original call. */
5388 if (nargs < 2)
5389 return NULL_RTX;
5390 fp = CALL_EXPR_ARG (exp, 0);
5391 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5392 return NULL_RTX;
5393 fmt = CALL_EXPR_ARG (exp, 1);
5394 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5395 return NULL_RTX;
5397 /* Check whether the format is a literal string constant. */
5398 fmt_str = c_getstr (fmt);
5399 if (fmt_str == NULL)
5400 return NULL_RTX;
5402 if (!init_target_chars ())
5403 return NULL_RTX;
5405 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5406 if (strcmp (fmt_str, target_percent_s) == 0)
5408 if ((nargs != 3)
5409 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5410 return NULL_RTX;
5411 arg = CALL_EXPR_ARG (exp, 2);
5412 if (fn_fputs)
5413 fn = build_call_expr (fn_fputs, 2, arg, fp);
5415 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5416 else if (strcmp (fmt_str, target_percent_c) == 0)
5418 if ((nargs != 3)
5419 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5420 return NULL_RTX;
5421 arg = CALL_EXPR_ARG (exp, 2);
5422 if (fn_fputc)
5423 fn = build_call_expr (fn_fputc, 2, arg, fp);
5425 else
5427 /* We can't handle anything else with % args or %% ... yet. */
5428 if (strchr (fmt_str, target_percent))
5429 return NULL_RTX;
5431 if (nargs > 2)
5432 return NULL_RTX;
5434 /* If the format specifier was "", fprintf does nothing. */
5435 if (fmt_str[0] == '\0')
5437 /* Evaluate and ignore FILE* argument for side-effects. */
5438 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5439 return const0_rtx;
5442 /* When "string" doesn't contain %, replace all cases of
5443 fprintf(stream,string) with fputs(string,stream). The fputs
5444 builtin will take care of special cases like length == 1. */
5445 if (fn_fputs)
5446 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5449 if (!fn)
5450 return NULL_RTX;
5451 if (TREE_CODE (fn) == CALL_EXPR)
5452 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5453 return expand_expr (fn, target, mode, EXPAND_NORMAL);
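/* Illustrative summary of the cases handled above, again only when the
   call's result is unused:

       fprintf (fp, "%s", s)   ==> fputs (s, fp)
       fprintf (fp, "%c", c)   ==> fputc (c, fp)
       fprintf (fp, "text")    ==> fputs ("text", fp)
       fprintf (fp, "")        ==> evaluate FP for side effects only

   Formats with any other '%' directive fall back to a real fprintf call.  */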
5456 /* Expand a call EXP to sprintf. Return NULL_RTX if
5457 a normal call should be emitted rather than expanding the function
5458 inline. If convenient, the result should be placed in TARGET with
5459 mode MODE. */
5461 static rtx
5462 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5464 tree dest, fmt;
5465 const char *fmt_str;
5466 int nargs = call_expr_nargs (exp);
5468 /* Verify the required arguments in the original call. */
5469 if (nargs < 2)
5470 return NULL_RTX;
5471 dest = CALL_EXPR_ARG (exp, 0);
5472 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5473 return NULL_RTX;
5474 fmt = CALL_EXPR_ARG (exp, 1);
5475 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5476 return NULL_RTX;
5478 /* Check whether the format is a literal string constant. */
5479 fmt_str = c_getstr (fmt);
5480 if (fmt_str == NULL)
5481 return NULL_RTX;
5483 if (!init_target_chars ())
5484 return NULL_RTX;
5486 /* If the format doesn't contain % args or %%, use strcpy. */
5487 if (strchr (fmt_str, target_percent) == 0)
5489 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5490 tree exp;
5492 if ((nargs > 2) || ! fn)
5493 return NULL_RTX;
5494 expand_expr (build_call_expr (fn, 2, dest, fmt),
5495 const0_rtx, VOIDmode, EXPAND_NORMAL);
5496 if (target == const0_rtx)
5497 return const0_rtx;
5498 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5499 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5501 /* If the format is "%s", use strcpy if the result isn't used. */
5502 else if (strcmp (fmt_str, target_percent_s) == 0)
5504 tree fn, arg, len;
5505 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5507 if (! fn)
5508 return NULL_RTX;
5509 if (nargs != 3)
5510 return NULL_RTX;
5511 arg = CALL_EXPR_ARG (exp, 2);
5512 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5513 return NULL_RTX;
5515 if (target != const0_rtx)
5517 len = c_strlen (arg, 1);
5518 if (! len || TREE_CODE (len) != INTEGER_CST)
5519 return NULL_RTX;
5521 else
5522 len = NULL_TREE;
5524 expand_expr (build_call_expr (fn, 2, dest, arg),
5525 const0_rtx, VOIDmode, EXPAND_NORMAL);
5527 if (target == const0_rtx)
5528 return const0_rtx;
5529 return expand_expr (len, target, mode, EXPAND_NORMAL);
5532 return NULL_RTX;
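/* Illustrative summary of the sprintf cases handled above:

       sprintf (buf, "abcde")  ==> strcpy (buf, "abcde"); the result, if
                                   used, folds to the constant 5
       sprintf (buf, "%s", s)  ==> strcpy (buf, s); the result is only
                                   produced when c_strlen (s) is a
                                   compile-time constant

   Any other format containing '%' is emitted as a normal sprintf call.  */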
5535 /* Expand a call to either the entry or exit function profiler. */
5537 static rtx
5538 expand_builtin_profile_func (bool exitp)
5540 rtx this, which;
5542 this = DECL_RTL (current_function_decl);
5543 gcc_assert (MEM_P (this));
5544 this = XEXP (this, 0);
5546 if (exitp)
5547 which = profile_function_exit_libfunc;
5548 else
5549 which = profile_function_entry_libfunc;
5551 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5552 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5553 0),
5554 Pmode);
5556 return const0_rtx;
5559 /* Expand a call to __builtin___clear_cache. */
5561 static rtx
5562 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5564 #ifndef HAVE_clear_cache
5565 #ifdef CLEAR_INSN_CACHE
5566 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5567 does something. Just do the default expansion to a call to
5568 __clear_cache(). */
5569 return NULL_RTX;
5570 #else
5571 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5572 does nothing. There is no need to call it. Do nothing. */
5573 return const0_rtx;
5574 #endif /* CLEAR_INSN_CACHE */
5575 #else
5576 /* We have a "clear_cache" insn, and it will handle everything. */
5577 tree begin, end;
5578 rtx begin_rtx, end_rtx;
5579 enum insn_code icode;
5581 /* We must not expand to a library call. If we did, any
5582 fallback library function in libgcc that might contain a call to
5583 __builtin___clear_cache() would recurse infinitely. */
5584 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5586 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5587 return const0_rtx;
5590 if (HAVE_clear_cache)
5592 icode = CODE_FOR_clear_cache;
5594 begin = CALL_EXPR_ARG (exp, 0);
5595 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5596 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5597 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5598 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5600 end = CALL_EXPR_ARG (exp, 1);
5601 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5602 end_rtx = convert_memory_address (Pmode, end_rtx);
5603 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5604 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5606 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5608 return const0_rtx;
5609 #endif /* HAVE_clear_cache */
5612 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5614 static rtx
5615 round_trampoline_addr (rtx tramp)
5617 rtx temp, addend, mask;
5619 /* If we don't need too much alignment, we'll have been guaranteed
5620 proper alignment by get_trampoline_type. */
5621 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5622 return tramp;
5624 /* Round address up to desired boundary. */
5625 temp = gen_reg_rtx (Pmode);
5626 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5627 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5629 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5630 temp, 0, OPTAB_LIB_WIDEN);
5631 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5632 temp, 0, OPTAB_LIB_WIDEN);
5634 return tramp;
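/* Worked example with illustrative values: for TRAMPOLINE_ALIGNMENT == 64
   and BITS_PER_UNIT == 8, ADDEND is 7 and MASK is -8, so the two binops
   compute (tramp + 7) & -8, rounding the address up to the next 8-byte
   boundary: 0x1003 becomes 0x1008, while 0x1008 is left unchanged.  */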
5637 static rtx
5638 expand_builtin_init_trampoline (tree exp)
5640 tree t_tramp, t_func, t_chain;
5641 rtx r_tramp, r_func, r_chain;
5642 #ifdef TRAMPOLINE_TEMPLATE
5643 rtx blktramp;
5644 #endif
5646 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5647 POINTER_TYPE, VOID_TYPE))
5648 return NULL_RTX;
5650 t_tramp = CALL_EXPR_ARG (exp, 0);
5651 t_func = CALL_EXPR_ARG (exp, 1);
5652 t_chain = CALL_EXPR_ARG (exp, 2);
5654 r_tramp = expand_normal (t_tramp);
5655 r_func = expand_normal (t_func);
5656 r_chain = expand_normal (t_chain);
5658 /* Generate insns to initialize the trampoline. */
5659 r_tramp = round_trampoline_addr (r_tramp);
5660 #ifdef TRAMPOLINE_TEMPLATE
5661 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5662 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5663 emit_block_move (blktramp, assemble_trampoline_template (),
5664 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5665 #endif
5666 trampolines_created = 1;
5667 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5669 return const0_rtx;
5672 static rtx
5673 expand_builtin_adjust_trampoline (tree exp)
5675 rtx tramp;
5677 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5678 return NULL_RTX;
5680 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5681 tramp = round_trampoline_addr (tramp);
5682 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5683 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5684 #endif
5686 return tramp;
5689 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5690 function. The function first checks whether the back end provides
5691 an insn to implement signbit for the respective mode. If not, it
5692 checks whether the floating point format of the value is such that
5693 the sign bit can be extracted. If that is not the case, the
5694 function returns NULL_RTX to indicate that a normal call should be
5695 emitted rather than expanding the function in-line. EXP is the
5696 expression that is a call to the builtin function; if convenient,
5697 the result should be placed in TARGET. */
5698 static rtx
5699 expand_builtin_signbit (tree exp, rtx target)
5701 const struct real_format *fmt;
5702 enum machine_mode fmode, imode, rmode;
5703 HOST_WIDE_INT hi, lo;
5704 tree arg;
5705 int word, bitpos;
5706 enum insn_code icode;
5707 rtx temp;
5709 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5710 return NULL_RTX;
5712 arg = CALL_EXPR_ARG (exp, 0);
5713 fmode = TYPE_MODE (TREE_TYPE (arg));
5714 rmode = TYPE_MODE (TREE_TYPE (exp));
5715 fmt = REAL_MODE_FORMAT (fmode);
5717 arg = builtin_save_expr (arg);
5719 /* Expand the argument yielding a RTX expression. */
5720 temp = expand_normal (arg);
5722 /* Check if the back end provides an insn that handles signbit for the
5723 argument's mode. */
5724 icode = signbit_optab->handlers [(int) fmode].insn_code;
5725 if (icode != CODE_FOR_nothing)
5727 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5728 emit_unop_insn (icode, target, temp, UNKNOWN);
5729 return target;
5732 /* For floating point formats without a sign bit, implement signbit
5733 as "ARG < 0.0". */
5734 bitpos = fmt->signbit_ro;
5735 if (bitpos < 0)
5737 /* But we can't do this if the format supports signed zero. */
5738 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5739 return NULL_RTX;
5741 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5742 build_real (TREE_TYPE (arg), dconst0));
5743 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5746 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5748 imode = int_mode_for_mode (fmode);
5749 if (imode == BLKmode)
5750 return NULL_RTX;
5751 temp = gen_lowpart (imode, temp);
5753 else
5755 imode = word_mode;
5756 /* Handle targets with different FP word orders. */
5757 if (FLOAT_WORDS_BIG_ENDIAN)
5758 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5759 else
5760 word = bitpos / BITS_PER_WORD;
5761 temp = operand_subword_force (temp, word, fmode);
5762 bitpos = bitpos % BITS_PER_WORD;
5765 /* Force the intermediate word_mode (or narrower) result into a
5766 register. This avoids attempting to create paradoxical SUBREGs
5767 of floating point modes below. */
5768 temp = force_reg (imode, temp);
5770 /* If the bitpos is within the "result mode" lowpart, the operation
5771 can be implemented with a single bitwise AND. Otherwise, we need
5772 a right shift and an AND. */
5774 if (bitpos < GET_MODE_BITSIZE (rmode))
5776 if (bitpos < HOST_BITS_PER_WIDE_INT)
5778 hi = 0;
5779 lo = (HOST_WIDE_INT) 1 << bitpos;
5781 else
5783 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5784 lo = 0;
5787 if (imode != rmode)
5788 temp = gen_lowpart (rmode, temp);
5789 temp = expand_binop (rmode, and_optab, temp,
5790 immed_double_const (lo, hi, rmode),
5791 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5793 else
5795 /* Perform a logical right shift to place the signbit in the least
5796 significant bit, then truncate the result to the desired mode
5797 and mask just this bit. */
5798 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5799 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5800 temp = gen_lowpart (rmode, temp);
5801 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5802 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5805 return temp;
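/* Worked example (illustrative, assuming IEEE single precision with
   signbit_ro == 31 and a 32-bit result mode): the argument is
   reinterpreted as an integer and, since bit 31 fits within the result
   mode, masked directly with 0x80000000, so signbit (-1.5f) is nonzero
   and signbit (1.5f) is zero.  If the sign bit did not fit in the result
   mode, it would first be shifted down to bit 0 and then masked with 1.  */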
5808 /* Expand fork or exec calls. TARGET is the desired target of the
5809 call. EXP is the call. FN is the
5810 declaration of the actual function. IGNORE is nonzero if the
5811 value is to be ignored. */
5813 static rtx
5814 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5816 tree id, decl;
5817 tree call;
5819 /* If we are not profiling, just call the function. */
5820 if (!profile_arc_flag)
5821 return NULL_RTX;
5823 /* Otherwise call the wrapper. This should be equivalent for the rest of
5824 the compiler, so the code does not diverge, and the wrapper may run the
5825 code necessary for keeping the profiling sane. */
5827 switch (DECL_FUNCTION_CODE (fn))
5829 case BUILT_IN_FORK:
5830 id = get_identifier ("__gcov_fork");
5831 break;
5833 case BUILT_IN_EXECL:
5834 id = get_identifier ("__gcov_execl");
5835 break;
5837 case BUILT_IN_EXECV:
5838 id = get_identifier ("__gcov_execv");
5839 break;
5841 case BUILT_IN_EXECLP:
5842 id = get_identifier ("__gcov_execlp");
5843 break;
5845 case BUILT_IN_EXECLE:
5846 id = get_identifier ("__gcov_execle");
5847 break;
5849 case BUILT_IN_EXECVP:
5850 id = get_identifier ("__gcov_execvp");
5851 break;
5853 case BUILT_IN_EXECVE:
5854 id = get_identifier ("__gcov_execve");
5855 break;
5857 default:
5858 gcc_unreachable ();
5861 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5862 DECL_EXTERNAL (decl) = 1;
5863 TREE_PUBLIC (decl) = 1;
5864 DECL_ARTIFICIAL (decl) = 1;
5865 TREE_NOTHROW (decl) = 1;
5866 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5867 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5868 call = rewrite_call_expr (exp, 0, decl, 0);
5869 return expand_call (call, target, ignore);
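/* Illustrative effect when compiling with -fprofile-arcs: a source-level
   call such as

       pid = fork ();

   is rewritten to call the libgcov wrapper

       pid = __gcov_fork ();

   which is expected to keep the coverage counters consistent (for
   instance by flushing them) before delegating to the real fork.  */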
5874 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5875 the pointer in these functions is void*, the tree optimizers may remove
5876 casts. The mode computed in expand_builtin isn't reliable either, due
5877 to __sync_bool_compare_and_swap.
5879 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5880 group of builtins. This gives us log2 of the mode size. */
5882 static inline enum machine_mode
5883 get_builtin_sync_mode (int fcode_diff)
5885 /* The size is not negotiable, so ask not to get BLKmode in return
5886 if the target indicates that a smaller size would be better. */
5887 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
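/* Worked example: for __sync_fetch_and_add_4 the difference from the _1
   variant is 2, so the size is BITS_PER_UNIT << 2 == 32 bits and the mode
   returned is the 32-bit integer mode (SImode on typical targets).  */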
5890 /* Expand the memory expression LOC and return the appropriate memory operand
5891 for the builtin_sync operations. */
5893 static rtx
5894 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5896 rtx addr, mem;
5898 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5900 /* Note that we explicitly do not want any alias information for this
5901 memory, so that we kill all other live memories. Otherwise we don't
5902 satisfy the full barrier semantics of the intrinsic. */
5903 mem = validize_mem (gen_rtx_MEM (mode, addr));
5905 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5906 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5907 MEM_VOLATILE_P (mem) = 1;
5909 return mem;
5912 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5913 EXP is the CALL_EXPR. CODE is the rtx code
5914 that corresponds to the arithmetic or logical operation from the name;
5915 an exception here is that NOT actually means NAND. TARGET is an optional
5916 place for us to store the results; AFTER is true if this is the
5917 xxx_and_fetch form, which returns the updated value. IGNORE is true if we don't actually care about
5918 the result of the operation at all. */
5920 static rtx
5921 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5922 enum rtx_code code, bool after,
5923 rtx target, bool ignore)
5925 rtx val, mem;
5926 enum machine_mode old_mode;
5928 /* Expand the operands. */
5929 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5931 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5932 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5933 of CONST_INTs, where we know the old_mode only from the call argument. */
5934 old_mode = GET_MODE (val);
5935 if (old_mode == VOIDmode)
5936 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5937 val = convert_modes (mode, old_mode, val, 1);
5939 if (ignore)
5940 return expand_sync_operation (mem, val, code);
5941 else
5942 return expand_sync_fetch_operation (mem, val, code, after, target);
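/* Source-level view of the two forms dispatched here (illustrative):

       __sync_fetch_and_add (&x, n)   AFTER == false, returns the old value
       __sync_add_and_fetch (&x, n)   AFTER == true,  returns the new value

   When the result is ignored only the bare atomic read-modify-write is
   emitted; and, as noted above, CODE == NOT selects the NAND variants.  */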
5945 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5946 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5947 true if this is the boolean form. TARGET is a place for us to store the
5948 results; this is NOT optional if IS_BOOL is true. */
5950 static rtx
5951 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5952 bool is_bool, rtx target)
5954 rtx old_val, new_val, mem;
5955 enum machine_mode old_mode;
5957 /* Expand the operands. */
5958 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5961 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5962 mode, EXPAND_NORMAL);
5963 /* If OLD_VAL is promoted to a wider mode, convert it back to MODE. Take care
5964 of CONST_INTs, where we know the old_mode only from the call argument. */
5965 old_mode = GET_MODE (old_val);
5966 if (old_mode == VOIDmode)
5967 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5968 old_val = convert_modes (mode, old_mode, old_val, 1);
5970 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5971 mode, EXPAND_NORMAL);
5972 /* If NEW_VAL is promoted to a wider mode, convert it back to MODE. Take care
5973 of CONST_INTs, where we know the old_mode only from the call argument. */
5974 old_mode = GET_MODE (new_val);
5975 if (old_mode == VOIDmode)
5976 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5977 new_val = convert_modes (mode, old_mode, new_val, 1);
5979 if (is_bool)
5980 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5981 else
5982 return expand_val_compare_and_swap (mem, old_val, new_val, target);
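/* Source-level view (illustrative):

       __sync_bool_compare_and_swap (&x, oldv, newv)
           nonzero iff x equalled OLDV and was replaced by NEWV;
       __sync_val_compare_and_swap (&x, oldv, newv)
           always returns the value x held before the operation.

   For the boolean form the caller in expand_builtin has already forced
   TARGET into a register, as required above.  */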
5985 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5986 general form is actually an atomic exchange, and some targets only
5987 support a reduced form with the second argument being a constant 1.
5988 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5989 the results. */
5991 static rtx
5992 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5993 rtx target)
5995 rtx val, mem;
5996 enum machine_mode old_mode;
5998 /* Expand the operands. */
5999 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6000 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6001 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6002 of CONST_INTs, where we know the old_mode only from the call argument. */
6003 old_mode = GET_MODE (val);
6004 if (old_mode == VOIDmode)
6005 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6006 val = convert_modes (mode, old_mode, val, 1);
6008 return expand_sync_lock_test_and_set (mem, val, target);
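/* Illustrative use: a minimal spin-lock acquire built on this intrinsic,
   relying on the acquire-barrier semantics documented for
   __sync_lock_test_and_set:

       while (__sync_lock_test_and_set (&lock, 1))
         continue;

   The loop exits once the value previously held by the lock word was 0.  */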
6011 /* Expand the __sync_synchronize intrinsic. */
6013 static void
6014 expand_builtin_synchronize (void)
6016 tree x;
6018 #ifdef HAVE_memory_barrier
6019 if (HAVE_memory_barrier)
6021 emit_insn (gen_memory_barrier ());
6022 return;
6024 #endif
6026 /* If no explicit memory barrier instruction is available, create an
6027 empty asm stmt with a memory clobber. */
6028 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6029 tree_cons (NULL, build_string (6, "memory"), NULL));
6030 ASM_VOLATILE_P (x) = 1;
6031 expand_asm_expr (x);
6034 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6036 static void
6037 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6039 enum insn_code icode;
6040 rtx mem, insn;
6041 rtx val = const0_rtx;
6043 /* Expand the operands. */
6044 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6046 /* If there is an explicit operation in the md file, use it. */
6047 icode = sync_lock_release[mode];
6048 if (icode != CODE_FOR_nothing)
6050 if (!insn_data[icode].operand[1].predicate (val, mode))
6051 val = force_reg (mode, val);
6053 insn = GEN_FCN (icode) (mem, val);
6054 if (insn)
6056 emit_insn (insn);
6057 return;
6061 /* Otherwise we can implement this operation by emitting a barrier
6062 followed by a store of zero. */
6063 expand_builtin_synchronize ();
6064 emit_move_insn (mem, val);
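/* Illustrative counterpart to a spin-lock acquire: __sync_lock_release
   (&lock) either maps onto the target's sync_lock_release pattern or, as
   the fallback coded above, expands to a full __sync_synchronize barrier
   followed by a plain store of zero into the lock word.  */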
6067 /* Expand an expression EXP that calls a built-in function,
6068 with result going to TARGET if that's convenient
6069 (and in mode MODE if that's convenient).
6070 SUBTARGET may be used as the target for computing one of EXP's operands.
6071 IGNORE is nonzero if the value is to be ignored. */
6073 rtx
6074 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6075 int ignore)
6077 tree fndecl = get_callee_fndecl (exp);
6078 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6079 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6081 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6082 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6084 /* When not optimizing, generate calls to library functions for a certain
6085 set of builtins. */
6086 if (!optimize
6087 && !called_as_built_in (fndecl)
6088 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6089 && fcode != BUILT_IN_ALLOCA)
6090 return expand_call (exp, target, ignore);
6092 /* The built-in function expanders test for target == const0_rtx
6093 to determine whether the function's result will be ignored. */
6094 if (ignore)
6095 target = const0_rtx;
6097 /* If the result of a pure or const built-in function is ignored, and
6098 none of its arguments are volatile, we can avoid expanding the
6099 built-in call and just evaluate the arguments for side-effects. */
6100 if (target == const0_rtx
6101 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6103 bool volatilep = false;
6104 tree arg;
6105 call_expr_arg_iterator iter;
6107 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6108 if (TREE_THIS_VOLATILE (arg))
6110 volatilep = true;
6111 break;
6114 if (! volatilep)
6116 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6117 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6118 return const0_rtx;
6122 switch (fcode)
6124 CASE_FLT_FN (BUILT_IN_FABS):
6125 target = expand_builtin_fabs (exp, target, subtarget);
6126 if (target)
6127 return target;
6128 break;
6130 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6131 target = expand_builtin_copysign (exp, target, subtarget);
6132 if (target)
6133 return target;
6134 break;
6136 /* Just do a normal library call if we were unable to fold
6137 the values. */
6138 CASE_FLT_FN (BUILT_IN_CABS):
6139 break;
6141 CASE_FLT_FN (BUILT_IN_EXP):
6142 CASE_FLT_FN (BUILT_IN_EXP10):
6143 CASE_FLT_FN (BUILT_IN_POW10):
6144 CASE_FLT_FN (BUILT_IN_EXP2):
6145 CASE_FLT_FN (BUILT_IN_EXPM1):
6146 CASE_FLT_FN (BUILT_IN_LOGB):
6147 CASE_FLT_FN (BUILT_IN_LOG):
6148 CASE_FLT_FN (BUILT_IN_LOG10):
6149 CASE_FLT_FN (BUILT_IN_LOG2):
6150 CASE_FLT_FN (BUILT_IN_LOG1P):
6151 CASE_FLT_FN (BUILT_IN_TAN):
6152 CASE_FLT_FN (BUILT_IN_ASIN):
6153 CASE_FLT_FN (BUILT_IN_ACOS):
6154 CASE_FLT_FN (BUILT_IN_ATAN):
6155 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6156 because of possible accuracy problems. */
6157 if (! flag_unsafe_math_optimizations)
6158 break;
6159 CASE_FLT_FN (BUILT_IN_SQRT):
6160 CASE_FLT_FN (BUILT_IN_FLOOR):
6161 CASE_FLT_FN (BUILT_IN_CEIL):
6162 CASE_FLT_FN (BUILT_IN_TRUNC):
6163 CASE_FLT_FN (BUILT_IN_ROUND):
6164 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6165 CASE_FLT_FN (BUILT_IN_RINT):
6166 target = expand_builtin_mathfn (exp, target, subtarget);
6167 if (target)
6168 return target;
6169 break;
6171 CASE_FLT_FN (BUILT_IN_ILOGB):
6172 if (! flag_unsafe_math_optimizations)
6173 break;
6174 CASE_FLT_FN (BUILT_IN_ISINF):
6175 CASE_FLT_FN (BUILT_IN_FINITE):
6176 case BUILT_IN_ISFINITE:
6177 case BUILT_IN_ISNORMAL:
6178 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6179 if (target)
6180 return target;
6181 break;
6183 CASE_FLT_FN (BUILT_IN_LCEIL):
6184 CASE_FLT_FN (BUILT_IN_LLCEIL):
6185 CASE_FLT_FN (BUILT_IN_LFLOOR):
6186 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6187 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6188 if (target)
6189 return target;
6190 break;
6192 CASE_FLT_FN (BUILT_IN_LRINT):
6193 CASE_FLT_FN (BUILT_IN_LLRINT):
6194 CASE_FLT_FN (BUILT_IN_LROUND):
6195 CASE_FLT_FN (BUILT_IN_LLROUND):
6196 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6197 if (target)
6198 return target;
6199 break;
6201 CASE_FLT_FN (BUILT_IN_POW):
6202 target = expand_builtin_pow (exp, target, subtarget);
6203 if (target)
6204 return target;
6205 break;
6207 CASE_FLT_FN (BUILT_IN_POWI):
6208 target = expand_builtin_powi (exp, target, subtarget);
6209 if (target)
6210 return target;
6211 break;
6213 CASE_FLT_FN (BUILT_IN_ATAN2):
6214 CASE_FLT_FN (BUILT_IN_LDEXP):
6215 CASE_FLT_FN (BUILT_IN_SCALB):
6216 CASE_FLT_FN (BUILT_IN_SCALBN):
6217 CASE_FLT_FN (BUILT_IN_SCALBLN):
6218 if (! flag_unsafe_math_optimizations)
6219 break;
6221 CASE_FLT_FN (BUILT_IN_FMOD):
6222 CASE_FLT_FN (BUILT_IN_REMAINDER):
6223 CASE_FLT_FN (BUILT_IN_DREM):
6224 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6225 if (target)
6226 return target;
6227 break;
6229 CASE_FLT_FN (BUILT_IN_CEXPI):
6230 target = expand_builtin_cexpi (exp, target, subtarget);
6231 gcc_assert (target);
6232 return target;
6234 CASE_FLT_FN (BUILT_IN_SIN):
6235 CASE_FLT_FN (BUILT_IN_COS):
6236 if (! flag_unsafe_math_optimizations)
6237 break;
6238 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6239 if (target)
6240 return target;
6241 break;
6243 CASE_FLT_FN (BUILT_IN_SINCOS):
6244 if (! flag_unsafe_math_optimizations)
6245 break;
6246 target = expand_builtin_sincos (exp);
6247 if (target)
6248 return target;
6249 break;
6251 case BUILT_IN_APPLY_ARGS:
6252 return expand_builtin_apply_args ();
6254 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6255 FUNCTION with a copy of the parameters described by
6256 ARGUMENTS, and ARGSIZE. It returns a block of memory
6257 allocated on the stack into which is stored all the registers
6258 that might possibly be used for returning the result of a
6259 function. ARGUMENTS is the value returned by
6260 __builtin_apply_args. ARGSIZE is the number of bytes of
6261 arguments that must be copied. ??? How should this value be
6262 computed? We'll also need a safe worst case value for varargs
6263 functions. */
6264 case BUILT_IN_APPLY:
6265 if (!validate_arglist (exp, POINTER_TYPE,
6266 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6267 && !validate_arglist (exp, REFERENCE_TYPE,
6268 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6269 return const0_rtx;
6270 else
6272 rtx ops[3];
6274 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6275 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6276 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6278 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6281 /* __builtin_return (RESULT) causes the function to return the
6282 value described by RESULT. RESULT is address of the block of
6283 memory returned by __builtin_apply. */
6284 case BUILT_IN_RETURN:
6285 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6286 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6287 return const0_rtx;
6289 case BUILT_IN_SAVEREGS:
6290 return expand_builtin_saveregs ();
6292 case BUILT_IN_ARGS_INFO:
6293 return expand_builtin_args_info (exp);
6295 case BUILT_IN_VA_ARG_PACK:
6296 /* All valid uses of __builtin_va_arg_pack () are removed during
6297 inlining. */
6298 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6299 return const0_rtx;
6301 case BUILT_IN_VA_ARG_PACK_LEN:
6302 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6303 inlining. */
6304 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6305 return const0_rtx;
6307 /* Return the address of the first anonymous stack arg. */
6308 case BUILT_IN_NEXT_ARG:
6309 if (fold_builtin_next_arg (exp, false))
6310 return const0_rtx;
6311 return expand_builtin_next_arg ();
6313 case BUILT_IN_CLEAR_CACHE:
6314 target = expand_builtin___clear_cache (exp);
6315 if (target)
6316 return target;
6317 break;
6319 case BUILT_IN_CLASSIFY_TYPE:
6320 return expand_builtin_classify_type (exp);
6322 case BUILT_IN_CONSTANT_P:
6323 return const0_rtx;
6325 case BUILT_IN_FRAME_ADDRESS:
6326 case BUILT_IN_RETURN_ADDRESS:
6327 return expand_builtin_frame_address (fndecl, exp);
6329 /* Returns the address of the area where the structure is returned.
6330 0 otherwise. */
6331 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6332 if (call_expr_nargs (exp) != 0
6333 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6334 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6335 return const0_rtx;
6336 else
6337 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6339 case BUILT_IN_ALLOCA:
6340 target = expand_builtin_alloca (exp, target);
6341 if (target)
6342 return target;
6343 break;
6345 case BUILT_IN_STACK_SAVE:
6346 return expand_stack_save ();
6348 case BUILT_IN_STACK_RESTORE:
6349 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6350 return const0_rtx;
6352 case BUILT_IN_BSWAP32:
6353 case BUILT_IN_BSWAP64:
6354 target = expand_builtin_bswap (exp, target, subtarget);
6356 if (target)
6357 return target;
6358 break;
6360 CASE_INT_FN (BUILT_IN_FFS):
6361 case BUILT_IN_FFSIMAX:
6362 target = expand_builtin_unop (target_mode, exp, target,
6363 subtarget, ffs_optab);
6364 if (target)
6365 return target;
6366 break;
6368 CASE_INT_FN (BUILT_IN_CLZ):
6369 case BUILT_IN_CLZIMAX:
6370 target = expand_builtin_unop (target_mode, exp, target,
6371 subtarget, clz_optab);
6372 if (target)
6373 return target;
6374 break;
6376 CASE_INT_FN (BUILT_IN_CTZ):
6377 case BUILT_IN_CTZIMAX:
6378 target = expand_builtin_unop (target_mode, exp, target,
6379 subtarget, ctz_optab);
6380 if (target)
6381 return target;
6382 break;
6384 CASE_INT_FN (BUILT_IN_POPCOUNT):
6385 case BUILT_IN_POPCOUNTIMAX:
6386 target = expand_builtin_unop (target_mode, exp, target,
6387 subtarget, popcount_optab);
6388 if (target)
6389 return target;
6390 break;
6392 CASE_INT_FN (BUILT_IN_PARITY):
6393 case BUILT_IN_PARITYIMAX:
6394 target = expand_builtin_unop (target_mode, exp, target,
6395 subtarget, parity_optab);
6396 if (target)
6397 return target;
6398 break;
6400 case BUILT_IN_STRLEN:
6401 target = expand_builtin_strlen (exp, target, target_mode);
6402 if (target)
6403 return target;
6404 break;
6406 case BUILT_IN_STRCPY:
6407 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6408 if (target)
6409 return target;
6410 break;
6412 case BUILT_IN_STRNCPY:
6413 target = expand_builtin_strncpy (exp, target, mode);
6414 if (target)
6415 return target;
6416 break;
6418 case BUILT_IN_STPCPY:
6419 target = expand_builtin_stpcpy (exp, target, mode);
6420 if (target)
6421 return target;
6422 break;
6424 case BUILT_IN_STRCAT:
6425 target = expand_builtin_strcat (fndecl, exp, target, mode);
6426 if (target)
6427 return target;
6428 break;
6430 case BUILT_IN_STRNCAT:
6431 target = expand_builtin_strncat (exp, target, mode);
6432 if (target)
6433 return target;
6434 break;
6436 case BUILT_IN_STRSPN:
6437 target = expand_builtin_strspn (exp, target, mode);
6438 if (target)
6439 return target;
6440 break;
6442 case BUILT_IN_STRCSPN:
6443 target = expand_builtin_strcspn (exp, target, mode);
6444 if (target)
6445 return target;
6446 break;
6448 case BUILT_IN_STRSTR:
6449 target = expand_builtin_strstr (exp, target, mode);
6450 if (target)
6451 return target;
6452 break;
6454 case BUILT_IN_STRPBRK:
6455 target = expand_builtin_strpbrk (exp, target, mode);
6456 if (target)
6457 return target;
6458 break;
6460 case BUILT_IN_INDEX:
6461 case BUILT_IN_STRCHR:
6462 target = expand_builtin_strchr (exp, target, mode);
6463 if (target)
6464 return target;
6465 break;
6467 case BUILT_IN_RINDEX:
6468 case BUILT_IN_STRRCHR:
6469 target = expand_builtin_strrchr (exp, target, mode);
6470 if (target)
6471 return target;
6472 break;
6474 case BUILT_IN_MEMCPY:
6475 target = expand_builtin_memcpy (exp, target, mode);
6476 if (target)
6477 return target;
6478 break;
6480 case BUILT_IN_MEMPCPY:
6481 target = expand_builtin_mempcpy (exp, target, mode);
6482 if (target)
6483 return target;
6484 break;
6486 case BUILT_IN_MEMMOVE:
6487 target = expand_builtin_memmove (exp, target, mode, ignore);
6488 if (target)
6489 return target;
6490 break;
6492 case BUILT_IN_BCOPY:
6493 target = expand_builtin_bcopy (exp, ignore);
6494 if (target)
6495 return target;
6496 break;
6498 case BUILT_IN_MEMSET:
6499 target = expand_builtin_memset (exp, target, mode);
6500 if (target)
6501 return target;
6502 break;
6504 case BUILT_IN_BZERO:
6505 target = expand_builtin_bzero (exp);
6506 if (target)
6507 return target;
6508 break;
6510 case BUILT_IN_STRCMP:
6511 target = expand_builtin_strcmp (exp, target, mode);
6512 if (target)
6513 return target;
6514 break;
6516 case BUILT_IN_STRNCMP:
6517 target = expand_builtin_strncmp (exp, target, mode);
6518 if (target)
6519 return target;
6520 break;
6522 case BUILT_IN_MEMCHR:
6523 target = expand_builtin_memchr (exp, target, mode);
6524 if (target)
6525 return target;
6526 break;
6528 case BUILT_IN_BCMP:
6529 case BUILT_IN_MEMCMP:
6530 target = expand_builtin_memcmp (exp, target, mode);
6531 if (target)
6532 return target;
6533 break;
6535 case BUILT_IN_SETJMP:
6536 /* This should have been lowered to the builtins below. */
6537 gcc_unreachable ();
6539 case BUILT_IN_SETJMP_SETUP:
6540 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6541 and the receiver label. */
6542 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6544 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6545 VOIDmode, EXPAND_NORMAL);
6546 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6547 rtx label_r = label_rtx (label);
6549 /* This is copied from the handling of non-local gotos. */
6550 expand_builtin_setjmp_setup (buf_addr, label_r);
6551 nonlocal_goto_handler_labels
6552 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6553 nonlocal_goto_handler_labels);
6554 /* ??? Do not let expand_label treat us as such since we would
6555 not want to be both on the list of non-local labels and on
6556 the list of forced labels. */
6557 FORCED_LABEL (label) = 0;
6558 return const0_rtx;
6560 break;
6562 case BUILT_IN_SETJMP_DISPATCHER:
6563 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6564 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6566 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6567 rtx label_r = label_rtx (label);
6569 /* Remove the dispatcher label from the list of non-local labels
6570 since the receiver labels have been added to it above. */
6571 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6572 return const0_rtx;
6574 break;
6576 case BUILT_IN_SETJMP_RECEIVER:
6577 /* __builtin_setjmp_receiver is passed the receiver label. */
6578 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6580 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6581 rtx label_r = label_rtx (label);
6583 expand_builtin_setjmp_receiver (label_r);
6584 return const0_rtx;
6586 break;
6588 /* __builtin_longjmp is passed a pointer to an array of five words.
6589 It's similar to the C library longjmp function but works with
6590 __builtin_setjmp above. */
6591 case BUILT_IN_LONGJMP:
6592 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6594 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6595 VOIDmode, EXPAND_NORMAL);
6596 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6598 if (value != const1_rtx)
6600 error ("%<__builtin_longjmp%> second argument must be 1");
6601 return const0_rtx;
6604 expand_builtin_longjmp (buf_addr, value);
6605 return const0_rtx;
6607 break;
6609 case BUILT_IN_NONLOCAL_GOTO:
6610 target = expand_builtin_nonlocal_goto (exp);
6611 if (target)
6612 return target;
6613 break;
6615 /* This updates the setjmp buffer that is its argument with the value
6616 of the current stack pointer. */
6617 case BUILT_IN_UPDATE_SETJMP_BUF:
6618 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6620 rtx buf_addr
6621 = expand_normal (CALL_EXPR_ARG (exp, 0));
6623 expand_builtin_update_setjmp_buf (buf_addr);
6624 return const0_rtx;
6626 break;
6628 case BUILT_IN_TRAP:
6629 expand_builtin_trap ();
6630 return const0_rtx;
6632 case BUILT_IN_PRINTF:
6633 target = expand_builtin_printf (exp, target, mode, false);
6634 if (target)
6635 return target;
6636 break;
6638 case BUILT_IN_PRINTF_UNLOCKED:
6639 target = expand_builtin_printf (exp, target, mode, true);
6640 if (target)
6641 return target;
6642 break;
6644 case BUILT_IN_FPUTS:
6645 target = expand_builtin_fputs (exp, target, false);
6646 if (target)
6647 return target;
6648 break;
6649 case BUILT_IN_FPUTS_UNLOCKED:
6650 target = expand_builtin_fputs (exp, target, true);
6651 if (target)
6652 return target;
6653 break;
6655 case BUILT_IN_FPRINTF:
6656 target = expand_builtin_fprintf (exp, target, mode, false);
6657 if (target)
6658 return target;
6659 break;
6661 case BUILT_IN_FPRINTF_UNLOCKED:
6662 target = expand_builtin_fprintf (exp, target, mode, true);
6663 if (target)
6664 return target;
6665 break;
6667 case BUILT_IN_SPRINTF:
6668 target = expand_builtin_sprintf (exp, target, mode);
6669 if (target)
6670 return target;
6671 break;
6673 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6674 case BUILT_IN_SIGNBITD32:
6675 case BUILT_IN_SIGNBITD64:
6676 case BUILT_IN_SIGNBITD128:
6677 target = expand_builtin_signbit (exp, target);
6678 if (target)
6679 return target;
6680 break;
6682 /* Various hooks for the DWARF 2 __throw routine. */
6683 case BUILT_IN_UNWIND_INIT:
6684 expand_builtin_unwind_init ();
6685 return const0_rtx;
6686 case BUILT_IN_DWARF_CFA:
6687 return virtual_cfa_rtx;
6688 #ifdef DWARF2_UNWIND_INFO
6689 case BUILT_IN_DWARF_SP_COLUMN:
6690 return expand_builtin_dwarf_sp_column ();
6691 case BUILT_IN_INIT_DWARF_REG_SIZES:
6692 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6693 return const0_rtx;
6694 #endif
6695 case BUILT_IN_FROB_RETURN_ADDR:
6696 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6697 case BUILT_IN_EXTRACT_RETURN_ADDR:
6698 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6699 case BUILT_IN_EH_RETURN:
6700 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6701 CALL_EXPR_ARG (exp, 1));
6702 return const0_rtx;
6703 #ifdef EH_RETURN_DATA_REGNO
6704 case BUILT_IN_EH_RETURN_DATA_REGNO:
6705 return expand_builtin_eh_return_data_regno (exp);
6706 #endif
6707 case BUILT_IN_EXTEND_POINTER:
6708 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6710 case BUILT_IN_VA_START:
6711 return expand_builtin_va_start (exp);
6712 case BUILT_IN_VA_END:
6713 return expand_builtin_va_end (exp);
6714 case BUILT_IN_VA_COPY:
6715 return expand_builtin_va_copy (exp);
6716 case BUILT_IN_EXPECT:
6717 return expand_builtin_expect (exp, target);
6718 case BUILT_IN_PREFETCH:
6719 expand_builtin_prefetch (exp);
6720 return const0_rtx;
6722 case BUILT_IN_PROFILE_FUNC_ENTER:
6723 return expand_builtin_profile_func (false);
6724 case BUILT_IN_PROFILE_FUNC_EXIT:
6725 return expand_builtin_profile_func (true);
6727 case BUILT_IN_INIT_TRAMPOLINE:
6728 return expand_builtin_init_trampoline (exp);
6729 case BUILT_IN_ADJUST_TRAMPOLINE:
6730 return expand_builtin_adjust_trampoline (exp);
6732 case BUILT_IN_FORK:
6733 case BUILT_IN_EXECL:
6734 case BUILT_IN_EXECV:
6735 case BUILT_IN_EXECLP:
6736 case BUILT_IN_EXECLE:
6737 case BUILT_IN_EXECVP:
6738 case BUILT_IN_EXECVE:
6739 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6740 if (target)
6741 return target;
6742 break;
6744 case BUILT_IN_FETCH_AND_ADD_1:
6745 case BUILT_IN_FETCH_AND_ADD_2:
6746 case BUILT_IN_FETCH_AND_ADD_4:
6747 case BUILT_IN_FETCH_AND_ADD_8:
6748 case BUILT_IN_FETCH_AND_ADD_16:
6749 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6750 target = expand_builtin_sync_operation (mode, exp, PLUS,
6751 false, target, ignore);
6752 if (target)
6753 return target;
6754 break;
6756 case BUILT_IN_FETCH_AND_SUB_1:
6757 case BUILT_IN_FETCH_AND_SUB_2:
6758 case BUILT_IN_FETCH_AND_SUB_4:
6759 case BUILT_IN_FETCH_AND_SUB_8:
6760 case BUILT_IN_FETCH_AND_SUB_16:
6761 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6762 target = expand_builtin_sync_operation (mode, exp, MINUS,
6763 false, target, ignore);
6764 if (target)
6765 return target;
6766 break;
6768 case BUILT_IN_FETCH_AND_OR_1:
6769 case BUILT_IN_FETCH_AND_OR_2:
6770 case BUILT_IN_FETCH_AND_OR_4:
6771 case BUILT_IN_FETCH_AND_OR_8:
6772 case BUILT_IN_FETCH_AND_OR_16:
6773 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6774 target = expand_builtin_sync_operation (mode, exp, IOR,
6775 false, target, ignore);
6776 if (target)
6777 return target;
6778 break;
6780 case BUILT_IN_FETCH_AND_AND_1:
6781 case BUILT_IN_FETCH_AND_AND_2:
6782 case BUILT_IN_FETCH_AND_AND_4:
6783 case BUILT_IN_FETCH_AND_AND_8:
6784 case BUILT_IN_FETCH_AND_AND_16:
6785 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6786 target = expand_builtin_sync_operation (mode, exp, AND,
6787 false, target, ignore);
6788 if (target)
6789 return target;
6790 break;
6792 case BUILT_IN_FETCH_AND_XOR_1:
6793 case BUILT_IN_FETCH_AND_XOR_2:
6794 case BUILT_IN_FETCH_AND_XOR_4:
6795 case BUILT_IN_FETCH_AND_XOR_8:
6796 case BUILT_IN_FETCH_AND_XOR_16:
6797 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6798 target = expand_builtin_sync_operation (mode, exp, XOR,
6799 false, target, ignore);
6800 if (target)
6801 return target;
6802 break;
6804 case BUILT_IN_FETCH_AND_NAND_1:
6805 case BUILT_IN_FETCH_AND_NAND_2:
6806 case BUILT_IN_FETCH_AND_NAND_4:
6807 case BUILT_IN_FETCH_AND_NAND_8:
6808 case BUILT_IN_FETCH_AND_NAND_16:
6809 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6810 target = expand_builtin_sync_operation (mode, exp, NOT,
6811 false, target, ignore);
6812 if (target)
6813 return target;
6814 break;
6816 case BUILT_IN_ADD_AND_FETCH_1:
6817 case BUILT_IN_ADD_AND_FETCH_2:
6818 case BUILT_IN_ADD_AND_FETCH_4:
6819 case BUILT_IN_ADD_AND_FETCH_8:
6820 case BUILT_IN_ADD_AND_FETCH_16:
6821 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6822 target = expand_builtin_sync_operation (mode, exp, PLUS,
6823 true, target, ignore);
6824 if (target)
6825 return target;
6826 break;
6828 case BUILT_IN_SUB_AND_FETCH_1:
6829 case BUILT_IN_SUB_AND_FETCH_2:
6830 case BUILT_IN_SUB_AND_FETCH_4:
6831 case BUILT_IN_SUB_AND_FETCH_8:
6832 case BUILT_IN_SUB_AND_FETCH_16:
6833 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6834 target = expand_builtin_sync_operation (mode, exp, MINUS,
6835 true, target, ignore);
6836 if (target)
6837 return target;
6838 break;
6840 case BUILT_IN_OR_AND_FETCH_1:
6841 case BUILT_IN_OR_AND_FETCH_2:
6842 case BUILT_IN_OR_AND_FETCH_4:
6843 case BUILT_IN_OR_AND_FETCH_8:
6844 case BUILT_IN_OR_AND_FETCH_16:
6845 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6846 target = expand_builtin_sync_operation (mode, exp, IOR,
6847 true, target, ignore);
6848 if (target)
6849 return target;
6850 break;
6852 case BUILT_IN_AND_AND_FETCH_1:
6853 case BUILT_IN_AND_AND_FETCH_2:
6854 case BUILT_IN_AND_AND_FETCH_4:
6855 case BUILT_IN_AND_AND_FETCH_8:
6856 case BUILT_IN_AND_AND_FETCH_16:
6857 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6858 target = expand_builtin_sync_operation (mode, exp, AND,
6859 true, target, ignore);
6860 if (target)
6861 return target;
6862 break;
6864 case BUILT_IN_XOR_AND_FETCH_1:
6865 case BUILT_IN_XOR_AND_FETCH_2:
6866 case BUILT_IN_XOR_AND_FETCH_4:
6867 case BUILT_IN_XOR_AND_FETCH_8:
6868 case BUILT_IN_XOR_AND_FETCH_16:
6869 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6870 target = expand_builtin_sync_operation (mode, exp, XOR,
6871 true, target, ignore);
6872 if (target)
6873 return target;
6874 break;
6876 case BUILT_IN_NAND_AND_FETCH_1:
6877 case BUILT_IN_NAND_AND_FETCH_2:
6878 case BUILT_IN_NAND_AND_FETCH_4:
6879 case BUILT_IN_NAND_AND_FETCH_8:
6880 case BUILT_IN_NAND_AND_FETCH_16:
6881 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6882 target = expand_builtin_sync_operation (mode, exp, NOT,
6883 true, target, ignore);
6884 if (target)
6885 return target;
6886 break;
6888 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6889 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6890 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6891 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6892 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6893 if (mode == VOIDmode)
6894 mode = TYPE_MODE (boolean_type_node);
6895 if (!target || !register_operand (target, mode))
6896 target = gen_reg_rtx (mode);
6898 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6899 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6900 if (target)
6901 return target;
6902 break;
6904 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6905 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6906 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6907 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6908 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6909 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6910 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6911 if (target)
6912 return target;
6913 break;
6915 case BUILT_IN_LOCK_TEST_AND_SET_1:
6916 case BUILT_IN_LOCK_TEST_AND_SET_2:
6917 case BUILT_IN_LOCK_TEST_AND_SET_4:
6918 case BUILT_IN_LOCK_TEST_AND_SET_8:
6919 case BUILT_IN_LOCK_TEST_AND_SET_16:
6920 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6921 target = expand_builtin_lock_test_and_set (mode, exp, target);
6922 if (target)
6923 return target;
6924 break;
6926 case BUILT_IN_LOCK_RELEASE_1:
6927 case BUILT_IN_LOCK_RELEASE_2:
6928 case BUILT_IN_LOCK_RELEASE_4:
6929 case BUILT_IN_LOCK_RELEASE_8:
6930 case BUILT_IN_LOCK_RELEASE_16:
6931 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6932 expand_builtin_lock_release (mode, exp);
6933 return const0_rtx;
6935 case BUILT_IN_SYNCHRONIZE:
6936 expand_builtin_synchronize ();
6937 return const0_rtx;
6939 case BUILT_IN_OBJECT_SIZE:
6940 return expand_builtin_object_size (exp);
6942 case BUILT_IN_MEMCPY_CHK:
6943 case BUILT_IN_MEMPCPY_CHK:
6944 case BUILT_IN_MEMMOVE_CHK:
6945 case BUILT_IN_MEMSET_CHK:
6946 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6947 if (target)
6948 return target;
6949 break;
6951 case BUILT_IN_STRCPY_CHK:
6952 case BUILT_IN_STPCPY_CHK:
6953 case BUILT_IN_STRNCPY_CHK:
6954 case BUILT_IN_STRCAT_CHK:
6955 case BUILT_IN_STRNCAT_CHK:
6956 case BUILT_IN_SNPRINTF_CHK:
6957 case BUILT_IN_VSNPRINTF_CHK:
6958 maybe_emit_chk_warning (exp, fcode);
6959 break;
6961 case BUILT_IN_SPRINTF_CHK:
6962 case BUILT_IN_VSPRINTF_CHK:
6963 maybe_emit_sprintf_chk_warning (exp, fcode);
6964 break;
6966 default: /* Just do a normal library call for an unknown builtin. */
6967 break;
6970 /* The switch statement above can drop through to cause the function
6971 to be called normally. */
6972 return expand_call (exp, target, ignore);
6975 /* Determine whether a tree node represents a call to a built-in
6976 function. If the tree T is a call to a built-in function with
6977 the right number of arguments of the appropriate types, return
6978 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6979 Otherwise the return value is END_BUILTINS. */
6981 enum built_in_function
6982 builtin_mathfn_code (const_tree t)
6984 const_tree fndecl, arg, parmlist;
6985 const_tree argtype, parmtype;
6986 const_call_expr_arg_iterator iter;
6988 if (TREE_CODE (t) != CALL_EXPR
6989 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6990 return END_BUILTINS;
6992 fndecl = get_callee_fndecl (t);
6993 if (fndecl == NULL_TREE
6994 || TREE_CODE (fndecl) != FUNCTION_DECL
6995 || ! DECL_BUILT_IN (fndecl)
6996 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6997 return END_BUILTINS;
6999 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7000 init_const_call_expr_arg_iterator (t, &iter);
7001 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7003 /* If a function doesn't take a variable number of arguments,
7004 the last element in the list will have type `void'. */
7005 parmtype = TREE_VALUE (parmlist);
7006 if (VOID_TYPE_P (parmtype))
7008 if (more_const_call_expr_args_p (&iter))
7009 return END_BUILTINS;
7010 return DECL_FUNCTION_CODE (fndecl);
7013 if (! more_const_call_expr_args_p (&iter))
7014 return END_BUILTINS;
7016 arg = next_const_call_expr_arg (&iter);
7017 argtype = TREE_TYPE (arg);
7019 if (SCALAR_FLOAT_TYPE_P (parmtype))
7021 if (! SCALAR_FLOAT_TYPE_P (argtype))
7022 return END_BUILTINS;
7024 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7026 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7027 return END_BUILTINS;
7029 else if (POINTER_TYPE_P (parmtype))
7031 if (! POINTER_TYPE_P (argtype))
7032 return END_BUILTINS;
7034 else if (INTEGRAL_TYPE_P (parmtype))
7036 if (! INTEGRAL_TYPE_P (argtype))
7037 return END_BUILTINS;
7039 else
7040 return END_BUILTINS;
7043 /* Variable-length argument list. */
7044 return DECL_FUNCTION_CODE (fndecl);
7047 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7048 evaluate to a constant. */
7050 static tree
7051 fold_builtin_constant_p (tree arg)
7053 /* We return 1 for a numeric type that's known to be a constant
7054 value at compile-time or for an aggregate type that's a
7055 literal constant. */
7056 STRIP_NOPS (arg);
7058 /* If we know this is a constant, return the constant one. */
7059 if (CONSTANT_CLASS_P (arg)
7060 || (TREE_CODE (arg) == CONSTRUCTOR
7061 && TREE_CONSTANT (arg)))
7062 return integer_one_node;
7063 if (TREE_CODE (arg) == ADDR_EXPR)
7065 tree op = TREE_OPERAND (arg, 0);
7066 if (TREE_CODE (op) == STRING_CST
7067 || (TREE_CODE (op) == ARRAY_REF
7068 && integer_zerop (TREE_OPERAND (op, 1))
7069 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7070 return integer_one_node;
7073 /* If this expression has side effects, show we don't know it to be a
7074 constant. Likewise if it's a pointer or aggregate type, since in
7075 those cases we only want literals, as those are only optimized
7076 when generating RTL, not later.
7077 And finally, if we are compiling an initializer, not code, we
7078 need to return a definite result now; there's not going to be any
7079 more optimization done. */
7080 if (TREE_SIDE_EFFECTS (arg)
7081 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7082 || POINTER_TYPE_P (TREE_TYPE (arg))
7083 || cfun == 0
7084 || folding_initializer)
7085 return integer_zero_node;
7087 return NULL_TREE;
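/* Examples of how this folds (illustrative):

       __builtin_constant_p (42)      -> 1  (constant class)
       __builtin_constant_p ("abc")   -> 1  (address of a STRING_CST)
       __builtin_constant_p (p)       -> 0  for a pointer-typed variable
       __builtin_constant_p (i)       -> deferred (NULL_TREE) for an
                                         integer variable, so later RTL
                                         passes may still resolve it  */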
7090 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7091 return it as a truthvalue. */
7093 static tree
7094 build_builtin_expect_predicate (tree pred, tree expected)
7096 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7098 fn = built_in_decls[BUILT_IN_EXPECT];
7099 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7100 ret_type = TREE_TYPE (TREE_TYPE (fn));
7101 pred_type = TREE_VALUE (arg_types);
7102 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7104 pred = fold_convert (pred_type, pred);
7105 expected = fold_convert (expected_type, expected);
7106 call_expr = build_call_expr (fn, 2, pred, expected);
7108 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7109 build_int_cst (ret_type, 0));
7112 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7113 NULL_TREE if no simplification is possible. */
7115 static tree
7116 fold_builtin_expect (tree arg0, tree arg1)
7118 tree inner, fndecl;
7119 enum tree_code code;
7121 /* If this is a builtin_expect within a builtin_expect, keep the
7122 inner one. See through a comparison against a constant. It
7123 might have been added to create a truthvalue. */
7124 inner = arg0;
7125 if (COMPARISON_CLASS_P (inner)
7126 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7127 inner = TREE_OPERAND (inner, 0);
7129 if (TREE_CODE (inner) == CALL_EXPR
7130 && (fndecl = get_callee_fndecl (inner))
7131 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7132 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7133 return arg0;
7135 /* Distribute the expected value over short-circuiting operators.
7136 See through the cast from truthvalue_type_node to long. */
7137 inner = arg0;
7138 while (TREE_CODE (inner) == NOP_EXPR
7139 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7140 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7141 inner = TREE_OPERAND (inner, 0);
7143 code = TREE_CODE (inner);
7144 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7146 tree op0 = TREE_OPERAND (inner, 0);
7147 tree op1 = TREE_OPERAND (inner, 1);
7149 op0 = build_builtin_expect_predicate (op0, arg1);
7150 op1 = build_builtin_expect_predicate (op1, arg1);
7151 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7153 return fold_convert (TREE_TYPE (arg0), inner);
7156 /* If the argument isn't invariant then there's nothing else we can do. */
7157 if (!TREE_CONSTANT (arg0))
7158 return NULL_TREE;
7160 /* If we expect that a comparison against the argument will fold to
7161 a constant return the constant. In practice, this means a true
7162 constant or the address of a non-weak symbol. */
7163 inner = arg0;
7164 STRIP_NOPS (inner);
7165 if (TREE_CODE (inner) == ADDR_EXPR)
7169 inner = TREE_OPERAND (inner, 0);
7171 while (TREE_CODE (inner) == COMPONENT_REF
7172 || TREE_CODE (inner) == ARRAY_REF);
7173 if (DECL_P (inner) && DECL_WEAK (inner))
7174 return NULL_TREE;
7177 /* Otherwise, ARG0 already has the proper type for the return value. */
7178 return arg0;
7181 /* Fold a call to __builtin_classify_type with argument ARG. */
7183 static tree
7184 fold_builtin_classify_type (tree arg)
7186 if (arg == 0)
7187 return build_int_cst (NULL_TREE, no_type_class);
7189 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7192 /* Fold a call to __builtin_strlen with argument ARG. */
7194 static tree
7195 fold_builtin_strlen (tree arg)
7197 if (!validate_arg (arg, POINTER_TYPE))
7198 return NULL_TREE;
7199 else
7201 tree len = c_strlen (arg, 0);
7203 if (len)
7205 /* Convert from the internal "sizetype" type to "size_t". */
7206 if (size_type_node)
7207 len = fold_convert (size_type_node, len);
7208 return len;
7211 return NULL_TREE;
7215 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7217 static tree
7218 fold_builtin_inf (tree type, int warn)
7220 REAL_VALUE_TYPE real;
7222 /* __builtin_inff is intended to be usable to define INFINITY on all
7223 targets. If an infinity is not available, INFINITY expands "to a
7224 positive constant of type float that overflows at translation
7225 time", footnote "In this case, using INFINITY will violate the
7226 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7227 Thus we pedwarn to ensure this constraint violation is
7228 diagnosed. */
7229 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7230 pedwarn ("target format does not support infinity");
7232 real_inf (&real);
7233 return build_real (type, real);
7236 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7238 static tree
7239 fold_builtin_nan (tree arg, tree type, int quiet)
7241 REAL_VALUE_TYPE real;
7242 const char *str;
7244 if (!validate_arg (arg, POINTER_TYPE))
7245 return NULL_TREE;
7246 str = c_getstr (arg);
7247 if (!str)
7248 return NULL_TREE;
7250 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7251 return NULL_TREE;
7253 return build_real (type, real);
7256 /* Return true if the floating point expression T has an integer value.
7257 We also allow +Inf, -Inf and NaN to be considered integer values. */
7259 static bool
7260 integer_valued_real_p (tree t)
7262 switch (TREE_CODE (t))
7264 case FLOAT_EXPR:
7265 return true;
7267 case ABS_EXPR:
7268 case SAVE_EXPR:
7269 return integer_valued_real_p (TREE_OPERAND (t, 0));
7271 case COMPOUND_EXPR:
7272 case MODIFY_EXPR:
7273 case BIND_EXPR:
7274 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7276 case PLUS_EXPR:
7277 case MINUS_EXPR:
7278 case MULT_EXPR:
7279 case MIN_EXPR:
7280 case MAX_EXPR:
7281 return integer_valued_real_p (TREE_OPERAND (t, 0))
7282 && integer_valued_real_p (TREE_OPERAND (t, 1));
7284 case COND_EXPR:
7285 return integer_valued_real_p (TREE_OPERAND (t, 1))
7286 && integer_valued_real_p (TREE_OPERAND (t, 2));
7288 case REAL_CST:
7289 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7291 case NOP_EXPR:
7293 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7294 if (TREE_CODE (type) == INTEGER_TYPE)
7295 return true;
7296 if (TREE_CODE (type) == REAL_TYPE)
7297 return integer_valued_real_p (TREE_OPERAND (t, 0));
7298 break;
7301 case CALL_EXPR:
7302 switch (builtin_mathfn_code (t))
7304 CASE_FLT_FN (BUILT_IN_CEIL):
7305 CASE_FLT_FN (BUILT_IN_FLOOR):
7306 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7307 CASE_FLT_FN (BUILT_IN_RINT):
7308 CASE_FLT_FN (BUILT_IN_ROUND):
7309 CASE_FLT_FN (BUILT_IN_TRUNC):
7310 return true;
7312 CASE_FLT_FN (BUILT_IN_FMIN):
7313 CASE_FLT_FN (BUILT_IN_FMAX):
7314 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7315 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7317 default:
7318 break;
7320 break;
7322 default:
7323 break;
7325 return false;
7328 /* FNDECL is assumed to be a builtin where truncation can be propagated
7329 across (for instance floor((double)f) == (double)floorf (f)).
7330 Do the transformation for a call with argument ARG. */
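/* Two representative folds performed below, shown only as a sketch:

     floor (floor (x))  -> floor (x)            (idempotence)
     floor ((double) f) -> (double) floorf (f)  (f a float; only when
                                                 optimizing and the
                                                 narrower builtin exists)  */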
7332 static tree
7333 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7335 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7337 if (!validate_arg (arg, REAL_TYPE))
7338 return NULL_TREE;
7340 /* Integer rounding functions are idempotent. */
7341 if (fcode == builtin_mathfn_code (arg))
7342 return arg;
7344 /* If argument is already integer valued, and we don't need to worry
7345 about setting errno, there's no need to perform rounding. */
7346 if (! flag_errno_math && integer_valued_real_p (arg))
7347 return arg;
7349 if (optimize)
7351 tree arg0 = strip_float_extensions (arg);
7352 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7353 tree newtype = TREE_TYPE (arg0);
7354 tree decl;
7356 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7357 && (decl = mathfn_built_in (newtype, fcode)))
7358 return fold_convert (ftype,
7359 build_call_expr (decl, 1,
7360 fold_convert (newtype, arg0)));
7362 return NULL_TREE;
7365 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7366 the argument, for instance lround((double)f) -> lroundf (f).
7367 Do the transformation for a call with argument ARG. */
7369 static tree
7370 fold_fixed_mathfn (tree fndecl, tree arg)
7372 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7374 if (!validate_arg (arg, REAL_TYPE))
7375 return NULL_TREE;
7377 /* If argument is already integer valued, and we don't need to worry
7378 about setting errno, there's no need to perform rounding. */
7379 if (! flag_errno_math && integer_valued_real_p (arg))
7380 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7382 if (optimize)
7384 tree ftype = TREE_TYPE (arg);
7385 tree arg0 = strip_float_extensions (arg);
7386 tree newtype = TREE_TYPE (arg0);
7387 tree decl;
7389 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7390 && (decl = mathfn_built_in (newtype, fcode)))
7391 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7394 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7395 sizeof (long long) == sizeof (long). */
7396 if (TYPE_PRECISION (long_long_integer_type_node)
7397 == TYPE_PRECISION (long_integer_type_node))
7399 tree newfn = NULL_TREE;
7400 switch (fcode)
7402 CASE_FLT_FN (BUILT_IN_LLCEIL):
7403 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7404 break;
7406 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7407 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7408 break;
7410 CASE_FLT_FN (BUILT_IN_LLROUND):
7411 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7412 break;
7414 CASE_FLT_FN (BUILT_IN_LLRINT):
7415 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7416 break;
7418 default:
7419 break;
7422 if (newfn)
7424 tree newcall = build_call_expr (newfn, 1, arg);
7425 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7429 return NULL_TREE;
7432 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7433 return type. Return NULL_TREE if no simplification can be made. */
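/* A sketch of the simplifications attempted below (z, x, r and i are
   placeholders, r and i standing for the real and imaginary parts of z):

     cabs (x + 0.0i)            -> fabs (x)
     cabs (-z), cabs (conj (z)) -> cabs (z)
     cabs (z)                   -> sqrt (r*r + i*i), but only with
                                   -funsafe-math-optimizations, when
                                   optimizing and not for size.  */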
7435 static tree
7436 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7438 tree res;
7440 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7441 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7442 return NULL_TREE;
7444 /* Calculate the result when the argument is a constant. */
7445 if (TREE_CODE (arg) == COMPLEX_CST
7446 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7447 type, mpfr_hypot)))
7448 return res;
7450 if (TREE_CODE (arg) == COMPLEX_EXPR)
7452 tree real = TREE_OPERAND (arg, 0);
7453 tree imag = TREE_OPERAND (arg, 1);
7455 /* If either part is zero, cabs is fabs of the other. */
7456 if (real_zerop (real))
7457 return fold_build1 (ABS_EXPR, type, imag);
7458 if (real_zerop (imag))
7459 return fold_build1 (ABS_EXPR, type, real);
7461 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7462 if (flag_unsafe_math_optimizations
7463 && operand_equal_p (real, imag, OEP_PURE_SAME))
7465 const REAL_VALUE_TYPE sqrt2_trunc
7466 = real_value_truncate (TYPE_MODE (type),
7467 *get_real_const (rv_sqrt2));
7468 STRIP_NOPS (real);
7469 return fold_build2 (MULT_EXPR, type,
7470 fold_build1 (ABS_EXPR, type, real),
7471 build_real (type, sqrt2_trunc));
7475 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7476 if (TREE_CODE (arg) == NEGATE_EXPR
7477 || TREE_CODE (arg) == CONJ_EXPR)
7478 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7480 /* Don't do this when optimizing for size. */
7481 if (flag_unsafe_math_optimizations
7482 && optimize && !optimize_size)
7484 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7486 if (sqrtfn != NULL_TREE)
7488 tree rpart, ipart, result;
7490 arg = builtin_save_expr (arg);
7492 rpart = fold_build1 (REALPART_EXPR, type, arg);
7493 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7495 rpart = builtin_save_expr (rpart);
7496 ipart = builtin_save_expr (ipart);
7498 result = fold_build2 (PLUS_EXPR, type,
7499 fold_build2 (MULT_EXPR, type,
7500 rpart, rpart),
7501 fold_build2 (MULT_EXPR, type,
7502 ipart, ipart));
7504 return build_call_expr (sqrtfn, 1, result);
7508 return NULL_TREE;
7511 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7512 Return NULL_TREE if no simplification can be made. */
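/* Representative folds attempted below, all guarded by
   -funsafe-math-optimizations (a sketch, not a complete list):

     sqrt (exp (x))    -> exp (x * 0.5)
     sqrt (cbrt (x))   -> pow (x, 1.0/6.0)
     sqrt (pow (x, y)) -> pow (fabs (x), y * 0.5)

   A constant argument is instead evaluated directly via MPFR.  */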
7514 static tree
7515 fold_builtin_sqrt (tree arg, tree type)
7518 enum built_in_function fcode;
7519 tree res;
7521 if (!validate_arg (arg, REAL_TYPE))
7522 return NULL_TREE;
7524 /* Calculate the result when the argument is a constant. */
7525 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7526 return res;
7528 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7529 fcode = builtin_mathfn_code (arg);
7530 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7532 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7533 arg = fold_build2 (MULT_EXPR, type,
7534 CALL_EXPR_ARG (arg, 0),
7535 build_real (type, dconsthalf));
7536 return build_call_expr (expfn, 1, arg);
7539 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7540 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7542 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7544 if (powfn)
7546 tree arg0 = CALL_EXPR_ARG (arg, 0);
7547 tree tree_root;
7548 /* The inner root was either sqrt or cbrt. */
7549 REAL_VALUE_TYPE dconstroot =
7550 BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
7552 /* Adjust for the outer root. */
7553 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7554 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7555 tree_root = build_real (type, dconstroot);
7556 return build_call_expr (powfn, 2, arg0, tree_root);
7560 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7561 if (flag_unsafe_math_optimizations
7562 && (fcode == BUILT_IN_POW
7563 || fcode == BUILT_IN_POWF
7564 || fcode == BUILT_IN_POWL))
7566 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7567 tree arg0 = CALL_EXPR_ARG (arg, 0);
7568 tree arg1 = CALL_EXPR_ARG (arg, 1);
7569 tree narg1;
7570 if (!tree_expr_nonnegative_p (arg0))
7571 arg0 = build1 (ABS_EXPR, type, arg0);
7572 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7573 build_real (type, dconsthalf));
7574 return build_call_expr (powfn, 2, arg0, narg1);
7577 return NULL_TREE;
7580 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7581 Return NULL_TREE if no simplification can be made. */
7583 static tree
7584 fold_builtin_cbrt (tree arg, tree type)
7586 const enum built_in_function fcode = builtin_mathfn_code (arg);
7587 tree res;
7589 if (!validate_arg (arg, REAL_TYPE))
7590 return NULL_TREE;
7592 /* Calculate the result when the argument is a constant. */
7593 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7594 return res;
7596 if (flag_unsafe_math_optimizations)
7598 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7599 if (BUILTIN_EXPONENT_P (fcode))
7601 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7602 const REAL_VALUE_TYPE third_trunc =
7603 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
7604 arg = fold_build2 (MULT_EXPR, type,
7605 CALL_EXPR_ARG (arg, 0),
7606 build_real (type, third_trunc));
7607 return build_call_expr (expfn, 1, arg);
7610 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7611 if (BUILTIN_SQRT_P (fcode))
7613 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7615 if (powfn)
7617 tree arg0 = CALL_EXPR_ARG (arg, 0);
7618 tree tree_root;
7619 REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);
7621 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7622 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7623 tree_root = build_real (type, dconstroot);
7624 return build_call_expr (powfn, 2, arg0, tree_root);
7628 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7629 if (BUILTIN_CBRT_P (fcode))
7631 tree arg0 = CALL_EXPR_ARG (arg, 0);
7632 if (tree_expr_nonnegative_p (arg0))
7634 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7636 if (powfn)
7638 tree tree_root;
7639 REAL_VALUE_TYPE dconstroot;
7641 real_arithmetic (&dconstroot, MULT_EXPR,
7642 get_real_const (rv_third),
7643 get_real_const (rv_third));
7644 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7645 tree_root = build_real (type, dconstroot);
7646 return build_call_expr (powfn, 2, arg0, tree_root);
7651 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7652 if (fcode == BUILT_IN_POW
7653 || fcode == BUILT_IN_POWF
7654 || fcode == BUILT_IN_POWL)
7656 tree arg00 = CALL_EXPR_ARG (arg, 0);
7657 tree arg01 = CALL_EXPR_ARG (arg, 1);
7658 if (tree_expr_nonnegative_p (arg00))
7660 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7661 const REAL_VALUE_TYPE dconstroot
7662 = real_value_truncate (TYPE_MODE (type),
7663 *get_real_const (rv_third));
7664 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7665 build_real (type, dconstroot));
7666 return build_call_expr (powfn, 2, arg00, narg01);
7670 return NULL_TREE;
7673 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7674 TYPE is the type of the return value. Return NULL_TREE if no
7675 simplification can be made. */
7677 static tree
7678 fold_builtin_cos (tree arg, tree type, tree fndecl)
7680 tree res, narg;
7682 if (!validate_arg (arg, REAL_TYPE))
7683 return NULL_TREE;
7685 /* Calculate the result when the argument is a constant. */
7686 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7687 return res;
7689 /* Optimize cos(-x) into cos (x). */
7690 if ((narg = fold_strip_sign_ops (arg)))
7691 return build_call_expr (fndecl, 1, narg);
7693 return NULL_TREE;
7696 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7697 Return NULL_TREE if no simplification can be made. */
7699 static tree
7700 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7702 if (validate_arg (arg, REAL_TYPE))
7704 tree res, narg;
7706 /* Calculate the result when the argument is a constant. */
7707 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7708 return res;
7710 /* Optimize cosh(-x) into cosh (x). */
7711 if ((narg = fold_strip_sign_ops (arg)))
7712 return build_call_expr (fndecl, 1, narg);
7715 return NULL_TREE;
7718 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7719 Return NULL_TREE if no simplification can be made. */
7721 static tree
7722 fold_builtin_tan (tree arg, tree type)
7724 enum built_in_function fcode;
7725 tree res;
7727 if (!validate_arg (arg, REAL_TYPE))
7728 return NULL_TREE;
7730 /* Calculate the result when the argument is a constant. */
7731 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7732 return res;
7734 /* Optimize tan(atan(x)) = x. */
7735 fcode = builtin_mathfn_code (arg);
7736 if (flag_unsafe_math_optimizations
7737 && (fcode == BUILT_IN_ATAN
7738 || fcode == BUILT_IN_ATANF
7739 || fcode == BUILT_IN_ATANL))
7740 return CALL_EXPR_ARG (arg, 0);
7742 return NULL_TREE;
7745 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7746 NULL_TREE if no simplification can be made. */
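/* A minimal sketch of the canonicalization done below, assuming the
   target advertises C99 functions and a cexpi builtin of the right
   flavor exists (s and c are hypothetical result variables):

     sincos (x, &s, &c);

   is folded as if the user had written

     __complex__ double t = cexpi (x);
     s = __imag__ t;
     c = __real__ t;

   leaving a single call for later passes to optimize or expand.  */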
7748 static tree
7749 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7751 tree type;
7752 tree res, fn, call;
7754 if (!validate_arg (arg0, REAL_TYPE)
7755 || !validate_arg (arg1, POINTER_TYPE)
7756 || !validate_arg (arg2, POINTER_TYPE))
7757 return NULL_TREE;
7759 type = TREE_TYPE (arg0);
7761 /* Calculate the result when the argument is a constant. */
7762 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7763 return res;
7765 /* Canonicalize sincos to cexpi. */
7766 if (!TARGET_C99_FUNCTIONS)
7767 return NULL_TREE;
7768 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7769 if (!fn)
7770 return NULL_TREE;
7772 call = build_call_expr (fn, 1, arg0);
7773 call = builtin_save_expr (call);
7775 return build2 (COMPOUND_EXPR, type,
7776 build2 (MODIFY_EXPR, void_type_node,
7777 build_fold_indirect_ref (arg1),
7778 build1 (IMAGPART_EXPR, type, call)),
7779 build2 (MODIFY_EXPR, void_type_node,
7780 build_fold_indirect_ref (arg2),
7781 build1 (REALPART_EXPR, type, call)));
7784 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7785 NULL_TREE if no simplification can be made. */
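/* A sketch of the two cases handled below, assuming a cexpi builtin is
   available for the target:

     cexp (0.0 + yi) -> cexpi (y)
     cexp (x + yi)   -> exp (x) * cexpi (y)   (only with
                                               -funsafe-math-optimizations,
                                               built as a COMPLEX_EXPR)

   Both require that the real part of the argument can be extracted by
   fold_unary at compile time.  */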
7787 static tree
7788 fold_builtin_cexp (tree arg0, tree type)
7790 tree rtype;
7791 tree realp, imagp, ifn;
7793 if (!validate_arg (arg0, COMPLEX_TYPE))
7794 return NULL_TREE;
7796 rtype = TREE_TYPE (TREE_TYPE (arg0));
7798 /* If we can determine the real part of arg0 and it is constant zero,
7799 fold to cexpi. */
7800 if (!TARGET_C99_FUNCTIONS)
7801 return NULL_TREE;
7802 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7803 if (!ifn)
7804 return NULL_TREE;
7806 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7807 && real_zerop (realp))
7809 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7810 return build_call_expr (ifn, 1, narg);
7813 /* If we can easily decompose the real and imaginary parts, split cexp
7814 into exp (r) * cexpi (i). */
7815 if (flag_unsafe_math_optimizations
7816 && realp)
7818 tree rfn, rcall, icall;
7820 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7821 if (!rfn)
7822 return NULL_TREE;
7824 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7825 if (!imagp)
7826 return NULL_TREE;
7828 icall = build_call_expr (ifn, 1, imagp);
7829 icall = builtin_save_expr (icall);
7830 rcall = build_call_expr (rfn, 1, realp);
7831 rcall = builtin_save_expr (rcall);
7832 return fold_build2 (COMPLEX_EXPR, type,
7833 fold_build2 (MULT_EXPR, rtype,
7834 rcall,
7835 fold_build1 (REALPART_EXPR, rtype, icall)),
7836 fold_build2 (MULT_EXPR, rtype,
7837 rcall,
7838 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7841 return NULL_TREE;
7844 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7845 Return NULL_TREE if no simplification can be made. */
7847 static tree
7848 fold_builtin_trunc (tree fndecl, tree arg)
7850 if (!validate_arg (arg, REAL_TYPE))
7851 return NULL_TREE;
7853 /* Optimize trunc of constant value. */
7854 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7856 REAL_VALUE_TYPE r, x;
7857 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7859 x = TREE_REAL_CST (arg);
7860 real_trunc (&r, TYPE_MODE (type), &x);
7861 return build_real (type, r);
7864 return fold_trunc_transparent_mathfn (fndecl, arg);
7867 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7868 Return NULL_TREE if no simplification can be made. */
7870 static tree
7871 fold_builtin_floor (tree fndecl, tree arg)
7873 if (!validate_arg (arg, REAL_TYPE))
7874 return NULL_TREE;
7876 /* Optimize floor of constant value. */
7877 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7879 REAL_VALUE_TYPE x;
7881 x = TREE_REAL_CST (arg);
7882 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7884 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7885 REAL_VALUE_TYPE r;
7887 real_floor (&r, TYPE_MODE (type), &x);
7888 return build_real (type, r);
7892 /* Fold floor (x) where x is nonnegative to trunc (x). */
7893 if (tree_expr_nonnegative_p (arg))
7895 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7896 if (truncfn)
7897 return build_call_expr (truncfn, 1, arg);
7900 return fold_trunc_transparent_mathfn (fndecl, arg);
7903 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7904 Return NULL_TREE if no simplification can be made. */
7906 static tree
7907 fold_builtin_ceil (tree fndecl, tree arg)
7909 if (!validate_arg (arg, REAL_TYPE))
7910 return NULL_TREE;
7912 /* Optimize ceil of constant value. */
7913 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7915 REAL_VALUE_TYPE x;
7917 x = TREE_REAL_CST (arg);
7918 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7920 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7921 REAL_VALUE_TYPE r;
7923 real_ceil (&r, TYPE_MODE (type), &x);
7924 return build_real (type, r);
7928 return fold_trunc_transparent_mathfn (fndecl, arg);
7931 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7932 Return NULL_TREE if no simplification can be made. */
7934 static tree
7935 fold_builtin_round (tree fndecl, tree arg)
7937 if (!validate_arg (arg, REAL_TYPE))
7938 return NULL_TREE;
7940 /* Optimize round of constant value. */
7941 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7943 REAL_VALUE_TYPE x;
7945 x = TREE_REAL_CST (arg);
7946 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7948 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7949 REAL_VALUE_TYPE r;
7951 real_round (&r, TYPE_MODE (type), &x);
7952 return build_real (type, r);
7956 return fold_trunc_transparent_mathfn (fndecl, arg);
7959 /* Fold function call to builtin lround, lroundf or lroundl (or the
7960 corresponding long long versions) and other rounding functions. ARG
7961 is the argument to the call. Return NULL_TREE if no simplification
7962 can be made. */
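/* Illustrative examples of the folds attempted below (a sketch only):

     lround (2.5) -> 3          (constant folded when the result fits
                                 the integer return type)
     lfloor (x)   -> (long) x   when x is known to be nonnegative
     llround (x)  -> lround (x) when long long and long have the same
                                 precision (via fold_fixed_mathfn)  */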
7964 static tree
7965 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7967 if (!validate_arg (arg, REAL_TYPE))
7968 return NULL_TREE;
7970 /* Optimize lround of constant value. */
7971 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7973 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7975 if (real_isfinite (&x))
7977 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7978 tree ftype = TREE_TYPE (arg);
7979 unsigned HOST_WIDE_INT lo2;
7980 HOST_WIDE_INT hi, lo;
7981 REAL_VALUE_TYPE r;
7983 switch (DECL_FUNCTION_CODE (fndecl))
7985 CASE_FLT_FN (BUILT_IN_LFLOOR):
7986 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7987 real_floor (&r, TYPE_MODE (ftype), &x);
7988 break;
7990 CASE_FLT_FN (BUILT_IN_LCEIL):
7991 CASE_FLT_FN (BUILT_IN_LLCEIL):
7992 real_ceil (&r, TYPE_MODE (ftype), &x);
7993 break;
7995 CASE_FLT_FN (BUILT_IN_LROUND):
7996 CASE_FLT_FN (BUILT_IN_LLROUND):
7997 real_round (&r, TYPE_MODE (ftype), &x);
7998 break;
8000 default:
8001 gcc_unreachable ();
8004 REAL_VALUE_TO_INT (&lo, &hi, r);
8005 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8006 return build_int_cst_wide (itype, lo2, hi);
8010 switch (DECL_FUNCTION_CODE (fndecl))
8012 CASE_FLT_FN (BUILT_IN_LFLOOR):
8013 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8014 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8015 if (tree_expr_nonnegative_p (arg))
8016 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8017 arg);
8018 break;
8019 default:;
8022 return fold_fixed_mathfn (fndecl, arg);
8025 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8026 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8027 the argument to the call. Return NULL_TREE if no simplification can
8028 be made. */
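/* Compile-time evaluations performed below for constant arguments,
   shown here only as a sketch:

     __builtin_popcount (0xff) -> 8
     __builtin_parity (7)      -> 1
     __builtin_ffs (0)         -> 0
     __builtin_ctz (16)        -> 4

   Non-constant arguments are left untouched here and handled at
   expansion time.  */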
8030 static tree
8031 fold_builtin_bitop (tree fndecl, tree arg)
8033 if (!validate_arg (arg, INTEGER_TYPE))
8034 return NULL_TREE;
8036 /* Optimize for constant argument. */
8037 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8039 HOST_WIDE_INT hi, width, result;
8040 unsigned HOST_WIDE_INT lo;
8041 tree type;
8043 type = TREE_TYPE (arg);
8044 width = TYPE_PRECISION (type);
8045 lo = TREE_INT_CST_LOW (arg);
8047 /* Clear all the bits that are beyond the type's precision. */
8048 if (width > HOST_BITS_PER_WIDE_INT)
8050 hi = TREE_INT_CST_HIGH (arg);
8051 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8052 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8054 else
8056 hi = 0;
8057 if (width < HOST_BITS_PER_WIDE_INT)
8058 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8061 switch (DECL_FUNCTION_CODE (fndecl))
8063 CASE_INT_FN (BUILT_IN_FFS):
8064 if (lo != 0)
8065 result = exact_log2 (lo & -lo) + 1;
8066 else if (hi != 0)
8067 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8068 else
8069 result = 0;
8070 break;
8072 CASE_INT_FN (BUILT_IN_CLZ):
8073 if (hi != 0)
8074 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8075 else if (lo != 0)
8076 result = width - floor_log2 (lo) - 1;
8077 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8078 result = width;
8079 break;
8081 CASE_INT_FN (BUILT_IN_CTZ):
8082 if (lo != 0)
8083 result = exact_log2 (lo & -lo);
8084 else if (hi != 0)
8085 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8086 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8087 result = width;
8088 break;
8090 CASE_INT_FN (BUILT_IN_POPCOUNT):
8091 result = 0;
8092 while (lo)
8093 result++, lo &= lo - 1;
8094 while (hi)
8095 result++, hi &= hi - 1;
8096 break;
8098 CASE_INT_FN (BUILT_IN_PARITY):
8099 result = 0;
8100 while (lo)
8101 result++, lo &= lo - 1;
8102 while (hi)
8103 result++, hi &= hi - 1;
8104 result &= 1;
8105 break;
8107 default:
8108 gcc_unreachable ();
8111 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8114 return NULL_TREE;
8117 /* Fold function call to builtin_bswap and the long and long long
8118 variants. Return NULL_TREE if no simplification can be made. */
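/* Constant-folding sketch for the byte-swap builtins handled below:

     __builtin_bswap32 (0x12345678)         -> 0x78563412
     __builtin_bswap64 (0x0102030405060708) -> 0x0807060504030201

   Non-constant operands are not touched here.  */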
8119 static tree
8120 fold_builtin_bswap (tree fndecl, tree arg)
8122 if (! validate_arg (arg, INTEGER_TYPE))
8123 return NULL_TREE;
8125 /* Optimize constant value. */
8126 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8128 HOST_WIDE_INT hi, width, r_hi = 0;
8129 unsigned HOST_WIDE_INT lo, r_lo = 0;
8130 tree type;
8132 type = TREE_TYPE (arg);
8133 width = TYPE_PRECISION (type);
8134 lo = TREE_INT_CST_LOW (arg);
8135 hi = TREE_INT_CST_HIGH (arg);
8137 switch (DECL_FUNCTION_CODE (fndecl))
8139 case BUILT_IN_BSWAP32:
8140 case BUILT_IN_BSWAP64:
8142 int s;
8144 for (s = 0; s < width; s += 8)
8146 int d = width - s - 8;
8147 unsigned HOST_WIDE_INT byte;
8149 if (s < HOST_BITS_PER_WIDE_INT)
8150 byte = (lo >> s) & 0xff;
8151 else
8152 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8154 if (d < HOST_BITS_PER_WIDE_INT)
8155 r_lo |= byte << d;
8156 else
8157 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8161 break;
8163 default:
8164 gcc_unreachable ();
8167 if (width < HOST_BITS_PER_WIDE_INT)
8168 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8169 else
8170 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8173 return NULL_TREE;
8176 /* Return true if EXPR is the real constant contained in VALUE. */
8178 static bool
8179 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8181 STRIP_NOPS (expr);
8183 return ((TREE_CODE (expr) == REAL_CST
8184 && !TREE_OVERFLOW (expr)
8185 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8186 || (TREE_CODE (expr) == COMPLEX_CST
8187 && real_dconstp (TREE_REALPART (expr), value)
8188 && real_zerop (TREE_IMAGPART (expr))));
8191 /* A subroutine of fold_builtin to fold the various logarithmic
8192 functions. Return NULL_TREE if no simplification can be made.
8193 FUNC is the corresponding MPFR logarithm function. */
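/* Representative folds attempted below, the non-constant ones guarded
   by -funsafe-math-optimizations (a sketch, not a complete list):

     log (exp (x))     -> x
     log2 (exp2 (x))   -> x
     logN (pow (x, y)) -> y * logN (x)
     logN (sqrt (x))   -> 0.5 * logN (x)

   A constant argument is evaluated directly via MPFR.  */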
8195 static tree
8196 fold_builtin_logarithm (tree fndecl, tree arg,
8197 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8199 if (validate_arg (arg, REAL_TYPE))
8201 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8202 tree res;
8203 const enum built_in_function fcode = builtin_mathfn_code (arg);
8205 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8206 instead we'll look for 'e' truncated to MODE. So only do
8207 this if flag_unsafe_math_optimizations is set. */
8208 if (flag_unsafe_math_optimizations && func == mpfr_log)
8210 const REAL_VALUE_TYPE e_truncated =
8211 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
8212 if (real_dconstp (arg, &e_truncated))
8213 return build_real (type, dconst1);
8216 /* Calculate the result when the argument is a constant. */
8217 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8218 return res;
8220 /* Special case, optimize logN(expN(x)) = x. */
8221 if (flag_unsafe_math_optimizations
8222 && ((func == mpfr_log
8223 && (fcode == BUILT_IN_EXP
8224 || fcode == BUILT_IN_EXPF
8225 || fcode == BUILT_IN_EXPL))
8226 || (func == mpfr_log2
8227 && (fcode == BUILT_IN_EXP2
8228 || fcode == BUILT_IN_EXP2F
8229 || fcode == BUILT_IN_EXP2L))
8230 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8231 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8233 /* Optimize logN(func()) for various exponential functions. We
8234 want to determine the value "x" and the power "exponent" in
8235 order to transform logN(x**exponent) into exponent*logN(x). */
8236 if (flag_unsafe_math_optimizations)
8238 tree exponent = 0, x = 0;
8240 switch (fcode)
8242 CASE_FLT_FN (BUILT_IN_EXP):
8243 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8244 x = build_real (type,
8245 real_value_truncate (TYPE_MODE (type),
8246 *get_real_const (rv_e)));
8247 exponent = CALL_EXPR_ARG (arg, 0);
8248 break;
8249 CASE_FLT_FN (BUILT_IN_EXP2):
8250 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8251 x = build_real (type, dconst2);
8252 exponent = CALL_EXPR_ARG (arg, 0);
8253 break;
8254 CASE_FLT_FN (BUILT_IN_EXP10):
8255 CASE_FLT_FN (BUILT_IN_POW10):
8256 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8258 REAL_VALUE_TYPE dconst10;
8259 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8260 x = build_real (type, dconst10);
8262 exponent = CALL_EXPR_ARG (arg, 0);
8263 break;
8264 CASE_FLT_FN (BUILT_IN_SQRT):
8265 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8266 x = CALL_EXPR_ARG (arg, 0);
8267 exponent = build_real (type, dconsthalf);
8268 break;
8269 CASE_FLT_FN (BUILT_IN_CBRT):
8270 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8271 x = CALL_EXPR_ARG (arg, 0);
8272 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8273 *get_real_const (rv_third)));
8274 break;
8275 CASE_FLT_FN (BUILT_IN_POW):
8276 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8277 x = CALL_EXPR_ARG (arg, 0);
8278 exponent = CALL_EXPR_ARG (arg, 1);
8279 break;
8280 default:
8281 break;
8284 /* Now perform the optimization. */
8285 if (x && exponent)
8287 tree logfn = build_call_expr (fndecl, 1, x);
8288 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8293 return NULL_TREE;
8296 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8297 NULL_TREE if no simplification can be made. */
8299 static tree
8300 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8302 tree res, narg0, narg1;
8304 if (!validate_arg (arg0, REAL_TYPE)
8305 || !validate_arg (arg1, REAL_TYPE))
8306 return NULL_TREE;
8308 /* Calculate the result when the argument is a constant. */
8309 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8310 return res;
8312 /* If either argument to hypot has a negate or abs, strip that off.
8313 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8314 narg0 = fold_strip_sign_ops (arg0);
8315 narg1 = fold_strip_sign_ops (arg1);
8316 if (narg0 || narg1)
8318 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8319 narg1 ? narg1 : arg1);
8322 /* If either argument is zero, hypot is fabs of the other. */
8323 if (real_zerop (arg0))
8324 return fold_build1 (ABS_EXPR, type, arg1);
8325 else if (real_zerop (arg1))
8326 return fold_build1 (ABS_EXPR, type, arg0);
8328 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8329 if (flag_unsafe_math_optimizations
8330 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8332 const REAL_VALUE_TYPE sqrt2_trunc
8333 = real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
8334 return fold_build2 (MULT_EXPR, type,
8335 fold_build1 (ABS_EXPR, type, arg0),
8336 build_real (type, sqrt2_trunc));
8339 return NULL_TREE;
8343 /* Fold a builtin function call to pow, powf, or powl. Return
8344 NULL_TREE if no simplification can be made. */
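/* A sketch of some of the simplifications attempted below:

     pow (x, 0.0)     -> 1.0
     pow (x, 1.0)     -> x
     pow (x, -1.0)    -> 1.0 / x
     pow (x, 0.5)     -> sqrt (x)       (-funsafe-math-optimizations)
     pow (exp (x), y) -> exp (x * y)    (-funsafe-math-optimizations)

   Calls with entirely constant arguments are evaluated via MPFR.  */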
8345 static tree
8346 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8348 tree res;
8350 if (!validate_arg (arg0, REAL_TYPE)
8351 || !validate_arg (arg1, REAL_TYPE))
8352 return NULL_TREE;
8354 /* Calculate the result when the argument is a constant. */
8355 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8356 return res;
8358 /* Optimize pow(1.0,y) = 1.0. */
8359 if (real_onep (arg0))
8360 return omit_one_operand (type, build_real (type, dconst1), arg1);
8362 if (TREE_CODE (arg1) == REAL_CST
8363 && !TREE_OVERFLOW (arg1))
8365 REAL_VALUE_TYPE cint;
8366 REAL_VALUE_TYPE c;
8367 HOST_WIDE_INT n;
8369 c = TREE_REAL_CST (arg1);
8371 /* Optimize pow(x,0.0) = 1.0. */
8372 if (REAL_VALUES_EQUAL (c, dconst0))
8373 return omit_one_operand (type, build_real (type, dconst1),
8374 arg0);
8376 /* Optimize pow(x,1.0) = x. */
8377 if (REAL_VALUES_EQUAL (c, dconst1))
8378 return arg0;
8380 /* Optimize pow(x,-1.0) = 1.0/x. */
8381 if (REAL_VALUES_EQUAL (c, dconstm1))
8382 return fold_build2 (RDIV_EXPR, type,
8383 build_real (type, dconst1), arg0);
8385 /* Optimize pow(x,0.5) = sqrt(x). */
8386 if (flag_unsafe_math_optimizations
8387 && REAL_VALUES_EQUAL (c, dconsthalf))
8389 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8391 if (sqrtfn != NULL_TREE)
8392 return build_call_expr (sqrtfn, 1, arg0);
8395 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8396 if (flag_unsafe_math_optimizations)
8398 const REAL_VALUE_TYPE dconstroot
8399 = real_value_truncate (TYPE_MODE (type),
8400 *get_real_const (rv_third));
8402 if (REAL_VALUES_EQUAL (c, dconstroot))
8404 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8405 if (cbrtfn != NULL_TREE)
8406 return build_call_expr (cbrtfn, 1, arg0);
8410 /* Check for an integer exponent. */
8411 n = real_to_integer (&c);
8412 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8413 if (real_identical (&c, &cint))
8415 /* Attempt to evaluate pow at compile-time. */
8416 if (TREE_CODE (arg0) == REAL_CST
8417 && !TREE_OVERFLOW (arg0))
8419 REAL_VALUE_TYPE x;
8420 bool inexact;
8422 x = TREE_REAL_CST (arg0);
8423 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8424 if (flag_unsafe_math_optimizations || !inexact)
8425 return build_real (type, x);
8428 /* Strip sign ops from even integer powers. */
8429 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8431 tree narg0 = fold_strip_sign_ops (arg0);
8432 if (narg0)
8433 return build_call_expr (fndecl, 2, narg0, arg1);
8438 if (flag_unsafe_math_optimizations)
8440 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8442 /* Optimize pow(expN(x),y) = expN(x*y). */
8443 if (BUILTIN_EXPONENT_P (fcode))
8445 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8446 tree arg = CALL_EXPR_ARG (arg0, 0);
8447 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8448 return build_call_expr (expfn, 1, arg);
8451 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8452 if (BUILTIN_SQRT_P (fcode))
8454 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8455 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8456 build_real (type, dconsthalf));
8457 return build_call_expr (fndecl, 2, narg0, narg1);
8460 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8461 if (BUILTIN_CBRT_P (fcode))
8463 tree arg = CALL_EXPR_ARG (arg0, 0);
8464 if (tree_expr_nonnegative_p (arg))
8466 const REAL_VALUE_TYPE dconstroot
8467 = real_value_truncate (TYPE_MODE (type),
8468 *get_real_const (rv_third));
8469 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8470 build_real (type, dconstroot));
8471 return build_call_expr (fndecl, 2, arg, narg1);
8475 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8476 if (fcode == BUILT_IN_POW
8477 || fcode == BUILT_IN_POWF
8478 || fcode == BUILT_IN_POWL)
8480 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8481 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8482 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8483 return build_call_expr (fndecl, 2, arg00, narg1);
8487 return NULL_TREE;
8490 /* Fold a builtin function call to powi, powif, or powil with arguments ARG0 and ARG1.
8491 Return NULL_TREE if no simplification can be made. */
8492 static tree
8493 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8494 tree arg0, tree arg1, tree type)
8496 if (!validate_arg (arg0, REAL_TYPE)
8497 || !validate_arg (arg1, INTEGER_TYPE))
8498 return NULL_TREE;
8500 /* Optimize pow(1.0,y) = 1.0. */
8501 if (real_onep (arg0))
8502 return omit_one_operand (type, build_real (type, dconst1), arg1);
8504 if (host_integerp (arg1, 0))
8506 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8508 /* Evaluate powi at compile-time. */
8509 if (TREE_CODE (arg0) == REAL_CST
8510 && !TREE_OVERFLOW (arg0))
8512 REAL_VALUE_TYPE x;
8513 x = TREE_REAL_CST (arg0);
8514 real_powi (&x, TYPE_MODE (type), &x, c);
8515 return build_real (type, x);
8518 /* Optimize pow(x,0) = 1.0. */
8519 if (c == 0)
8520 return omit_one_operand (type, build_real (type, dconst1),
8521 arg0);
8523 /* Optimize pow(x,1) = x. */
8524 if (c == 1)
8525 return arg0;
8527 /* Optimize pow(x,-1) = 1.0/x. */
8528 if (c == -1)
8529 return fold_build2 (RDIV_EXPR, type,
8530 build_real (type, dconst1), arg0);
8533 return NULL_TREE;
8536 /* A subroutine of fold_builtin to fold the various exponent
8537 functions. Return NULL_TREE if no simplification can be made.
8538 FUNC is the corresponding MPFR exponent function. */
8540 static tree
8541 fold_builtin_exponent (tree fndecl, tree arg,
8542 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8544 if (validate_arg (arg, REAL_TYPE))
8546 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8547 tree res;
8549 /* Calculate the result when the argument is a constant. */
8550 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8551 return res;
8553 /* Optimize expN(logN(x)) = x. */
8554 if (flag_unsafe_math_optimizations)
8556 const enum built_in_function fcode = builtin_mathfn_code (arg);
8558 if ((func == mpfr_exp
8559 && (fcode == BUILT_IN_LOG
8560 || fcode == BUILT_IN_LOGF
8561 || fcode == BUILT_IN_LOGL))
8562 || (func == mpfr_exp2
8563 && (fcode == BUILT_IN_LOG2
8564 || fcode == BUILT_IN_LOG2F
8565 || fcode == BUILT_IN_LOG2L))
8566 || (func == mpfr_exp10
8567 && (fcode == BUILT_IN_LOG10
8568 || fcode == BUILT_IN_LOG10F
8569 || fcode == BUILT_IN_LOG10L)))
8570 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8574 return NULL_TREE;
8577 /* Return true if VAR is a VAR_DECL or a component thereof. */
8579 static bool
8580 var_decl_component_p (tree var)
8582 tree inner = var;
8583 while (handled_component_p (inner))
8584 inner = TREE_OPERAND (inner, 0);
8585 return SSA_VAR_P (inner);
8588 /* Fold function call to builtin memset. Return
8589 NULL_TREE if no simplification can be made. */
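/* A minimal sketch of the fold attempted below, assuming DEST is the
   address of a suitably aligned scalar variable I and LEN matches its
   size exactly:

     memset (&i, 0, sizeof i)     ->   i = 0
     memset (&i, 0x2a, sizeof i)  ->   i = 0x2a2a2a2a   (32-bit int,
                                                         8-bit bytes)

   If any precondition fails, NULL_TREE is returned and the call is
   left for the expanders.  */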
8591 static tree
8592 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8594 tree var, ret;
8595 unsigned HOST_WIDE_INT length, cval;
8597 if (! validate_arg (dest, POINTER_TYPE)
8598 || ! validate_arg (c, INTEGER_TYPE)
8599 || ! validate_arg (len, INTEGER_TYPE))
8600 return NULL_TREE;
8602 if (! host_integerp (len, 1))
8603 return NULL_TREE;
8605 /* If the LEN parameter is zero, return DEST. */
8606 if (integer_zerop (len))
8607 return omit_one_operand (type, dest, c);
8609 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8610 return NULL_TREE;
8612 var = dest;
8613 STRIP_NOPS (var);
8614 if (TREE_CODE (var) != ADDR_EXPR)
8615 return NULL_TREE;
8617 var = TREE_OPERAND (var, 0);
8618 if (TREE_THIS_VOLATILE (var))
8619 return NULL_TREE;
8621 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8622 && !POINTER_TYPE_P (TREE_TYPE (var)))
8623 return NULL_TREE;
8625 if (! var_decl_component_p (var))
8626 return NULL_TREE;
8628 length = tree_low_cst (len, 1);
8629 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8630 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8631 < (int) length)
8632 return NULL_TREE;
8634 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8635 return NULL_TREE;
8637 if (integer_zerop (c))
8638 cval = 0;
8639 else
8641 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8642 return NULL_TREE;
8644 cval = tree_low_cst (c, 1);
8645 cval &= 0xff;
8646 cval |= cval << 8;
8647 cval |= cval << 16;
8648 cval |= (cval << 31) << 1;
8651 ret = build_int_cst_type (TREE_TYPE (var), cval);
8652 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8653 if (ignore)
8654 return ret;
8656 return omit_one_operand (type, dest, ret);
8659 /* Fold function call to builtin bzero. Return
8660 NULL_TREE if no simplification can be made. */
8662 static tree
8663 fold_builtin_bzero (tree dest, tree size, bool ignore)
8665 if (! validate_arg (dest, POINTER_TYPE)
8666 || ! validate_arg (size, INTEGER_TYPE))
8667 return NULL_TREE;
8669 if (!ignore)
8670 return NULL_TREE;
8672 /* New argument list transforming bzero(ptr x, int y) to
8673 memset(ptr x, int 0, size_t y). This is done this way
8674 so that if it isn't expanded inline, we fall back to
8675 calling bzero instead of memset. */
8677 return fold_builtin_memset (dest, integer_zero_node,
8678 fold_convert (sizetype, size),
8679 void_type_node, ignore);
8682 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8683 NULL_TREE if no simplification can be made.
8684 If ENDP is 0, return DEST (like memcpy).
8685 If ENDP is 1, return DEST+LEN (like mempcpy).
8686 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8687 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8688 (memmove). */
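/* A sketch of the aggregate-to-scalar fold attempted below, assuming
   D and S are hypothetical variables of the same type, both addresses
   are sufficiently aligned, and LEN equals their size:

     memcpy (&d, &s, sizeof d)   ->   d = s   (value of the call: &d)

   A memmove call (ENDP == 3) whose source is read-only data, or whose
   constant length fits within the known alignment of both pointers,
   is first rewritten as a call to memcpy; other memmove cases return
   NULL_TREE.  */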
8690 static tree
8691 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8693 tree destvar, srcvar, expr;
8695 if (! validate_arg (dest, POINTER_TYPE)
8696 || ! validate_arg (src, POINTER_TYPE)
8697 || ! validate_arg (len, INTEGER_TYPE))
8698 return NULL_TREE;
8700 /* If the LEN parameter is zero, return DEST. */
8701 if (integer_zerop (len))
8702 return omit_one_operand (type, dest, src);
8704 /* If SRC and DEST are the same (and not volatile), return
8705 DEST{,+LEN,+LEN-1}. */
8706 if (operand_equal_p (src, dest, 0))
8707 expr = len;
8708 else
8710 tree srctype, desttype;
8711 if (endp == 3)
8713 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8714 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8716 /* Both DEST and SRC must be pointer types.
8717 ??? This is what old code did. Is the testing for pointer types
8718 really mandatory?
8720 If either SRC is readonly or length is 1, we can use memcpy. */
8721 if (dest_align && src_align
8722 && (readonly_data_expr (src)
8723 || (host_integerp (len, 1)
8724 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8725 tree_low_cst (len, 1)))))
8727 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8728 if (!fn)
8729 return NULL_TREE;
8730 return build_call_expr (fn, 3, dest, src, len);
8732 return NULL_TREE;
8735 if (!host_integerp (len, 0))
8736 return NULL_TREE;
8737 /* FIXME:
8738 This logic loses for arguments like (type *)malloc (sizeof (type)),
8739 since we strip the casts up to the VOID return value from malloc.
8740 Perhaps we ought to inherit type from non-VOID argument here? */
8741 STRIP_NOPS (src);
8742 STRIP_NOPS (dest);
8743 srctype = TREE_TYPE (TREE_TYPE (src));
8744 desttype = TREE_TYPE (TREE_TYPE (dest));
8745 if (!srctype || !desttype
8746 || !TYPE_SIZE_UNIT (srctype)
8747 || !TYPE_SIZE_UNIT (desttype)
8748 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8749 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8750 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8751 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8752 return NULL_TREE;
8754 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8755 < (int) TYPE_ALIGN (desttype)
8756 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8757 < (int) TYPE_ALIGN (srctype)))
8758 return NULL_TREE;
8760 if (!ignore)
8761 dest = builtin_save_expr (dest);
8763 srcvar = build_fold_indirect_ref (src);
8764 if (TREE_THIS_VOLATILE (srcvar))
8765 return NULL_TREE;
8766 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8767 return NULL_TREE;
8768 /* With memcpy, it is possible to bypass aliasing rules, so without
8769 this check, e.g. execute/20060930-2.c would be misoptimized, because
8770 it uses a conflicting alias set to hold the argument for the memcpy call.
8771 This check is probably unnecessary with -fno-strict-aliasing.
8772 Similarly for destvar. See also PR29286. */
8773 if (!var_decl_component_p (srcvar)
8774 /* Accept: memcpy (*char_var, "test", 1); that simplifies
8775 to char_var='t'; */
8776 || is_gimple_min_invariant (srcvar)
8777 || readonly_data_expr (src))
8778 return NULL_TREE;
8780 destvar = build_fold_indirect_ref (dest);
8781 if (TREE_THIS_VOLATILE (destvar))
8782 return NULL_TREE;
8783 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8784 return NULL_TREE;
8785 if (!var_decl_component_p (destvar))
8786 return NULL_TREE;
8788 if (srctype == desttype
8789 || (gimple_in_ssa_p (cfun)
8790 && useless_type_conversion_p (desttype, srctype)))
8791 expr = srcvar;
8792 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8793 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8794 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8795 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8796 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8797 else
8798 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8799 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8802 if (ignore)
8803 return expr;
8805 if (endp == 0 || endp == 3)
8806 return omit_one_operand (type, dest, expr);
8808 if (expr == len)
8809 expr = NULL_TREE;
8811 if (endp == 2)
8812 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8813 ssize_int (1));
8815 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8816 dest = fold_convert (type, dest);
8817 if (expr)
8818 dest = omit_one_operand (type, dest, expr);
8819 return dest;
8822 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8823 If LEN is not NULL, it represents the length of the string to be
8824 copied. Return NULL_TREE if no simplification can be made. */
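/* A sketch of the fold attempted below, skipped when optimizing for
   size: for a source string whose length is known at compile time,

     strcpy (dst, "abc")   ->   memcpy (dst, "abc", 4)

   with the result converted back to the return type of strcpy.  When
   LEN is not supplied by the caller it is computed via c_strlen.  */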
8826 tree
8827 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8829 tree fn;
8831 if (!validate_arg (dest, POINTER_TYPE)
8832 || !validate_arg (src, POINTER_TYPE))
8833 return NULL_TREE;
8835 /* If SRC and DEST are the same (and not volatile), return DEST. */
8836 if (operand_equal_p (src, dest, 0))
8837 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8839 if (optimize_size)
8840 return NULL_TREE;
8842 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8843 if (!fn)
8844 return NULL_TREE;
8846 if (!len)
8848 len = c_strlen (src, 1);
8849 if (! len || TREE_SIDE_EFFECTS (len))
8850 return NULL_TREE;
8853 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8854 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8855 build_call_expr (fn, 3, dest, src, len));
8858 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8859 If SLEN is not NULL, it represents the length of the source string.
8860 Return NULL_TREE if no simplification can be made. */
8862 tree
8863 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8865 tree fn;
8867 if (!validate_arg (dest, POINTER_TYPE)
8868 || !validate_arg (src, POINTER_TYPE)
8869 || !validate_arg (len, INTEGER_TYPE))
8870 return NULL_TREE;
8872 /* If the LEN parameter is zero, return DEST. */
8873 if (integer_zerop (len))
8874 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8876 /* We can't compare slen with len as constants below if len is not a
8877 constant. */
8878 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8879 return NULL_TREE;
8881 if (!slen)
8882 slen = c_strlen (src, 1);
8884 /* Now, we must be passed a constant src ptr parameter. */
8885 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8886 return NULL_TREE;
8888 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8890 /* We do not support simplification of this case, though we do
8891 support it when expanding trees into RTL. */
8892 /* FIXME: generate a call to __builtin_memset. */
8893 if (tree_int_cst_lt (slen, len))
8894 return NULL_TREE;
8896 /* OK, transform into builtin memcpy. */
8897 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8898 if (!fn)
8899 return NULL_TREE;
8900 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8901 build_call_expr (fn, 3, dest, src, len));
8904 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8905 arguments to the call, and TYPE is its return type.
8906 Return NULL_TREE if no simplification can be made. */
8908 static tree
8909 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8911 if (!validate_arg (arg1, POINTER_TYPE)
8912 || !validate_arg (arg2, INTEGER_TYPE)
8913 || !validate_arg (len, INTEGER_TYPE))
8914 return NULL_TREE;
8915 else
8917 const char *p1;
8919 if (TREE_CODE (arg2) != INTEGER_CST
8920 || !host_integerp (len, 1))
8921 return NULL_TREE;
8923 p1 = c_getstr (arg1);
8924 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8926 char c;
8927 const char *r;
8928 tree tem;
8930 if (target_char_cast (arg2, &c))
8931 return NULL_TREE;
8933 r = memchr (p1, c, tree_low_cst (len, 1));
8935 if (r == NULL)
8936 return build_int_cst (TREE_TYPE (arg1), 0);
8938 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8939 size_int (r - p1));
8940 return fold_convert (type, tem);
8942 return NULL_TREE;
8946 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8947 Return NULL_TREE if no simplification can be made. */
8949 static tree
8950 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8952 const char *p1, *p2;
8954 if (!validate_arg (arg1, POINTER_TYPE)
8955 || !validate_arg (arg2, POINTER_TYPE)
8956 || !validate_arg (len, INTEGER_TYPE))
8957 return NULL_TREE;
8959 /* If the LEN parameter is zero, return zero. */
8960 if (integer_zerop (len))
8961 return omit_two_operands (integer_type_node, integer_zero_node,
8962 arg1, arg2);
8964 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8965 if (operand_equal_p (arg1, arg2, 0))
8966 return omit_one_operand (integer_type_node, integer_zero_node, len);
8968 p1 = c_getstr (arg1);
8969 p2 = c_getstr (arg2);
8971 /* If all arguments are constant, and the value of len is not greater
8972 than the lengths of arg1 and arg2, evaluate at compile-time. */
8973 if (host_integerp (len, 1) && p1 && p2
8974 && compare_tree_int (len, strlen (p1) + 1) <= 0
8975 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8977 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8979 if (r > 0)
8980 return integer_one_node;
8981 else if (r < 0)
8982 return integer_minus_one_node;
8983 else
8984 return integer_zero_node;
8987 /* If len parameter is one, return an expression corresponding to
8988 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8989 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8991 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8992 tree cst_uchar_ptr_node
8993 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8995 tree ind1 = fold_convert (integer_type_node,
8996 build1 (INDIRECT_REF, cst_uchar_node,
8997 fold_convert (cst_uchar_ptr_node,
8998 arg1)));
8999 tree ind2 = fold_convert (integer_type_node,
9000 build1 (INDIRECT_REF, cst_uchar_node,
9001 fold_convert (cst_uchar_ptr_node,
9002 arg2)));
9003 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9006 return NULL_TREE;
9009 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9010 Return NULL_TREE if no simplification can be made. */
9012 static tree
9013 fold_builtin_strcmp (tree arg1, tree arg2)
9015 const char *p1, *p2;
9017 if (!validate_arg (arg1, POINTER_TYPE)
9018 || !validate_arg (arg2, POINTER_TYPE))
9019 return NULL_TREE;
9021 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9022 if (operand_equal_p (arg1, arg2, 0))
9023 return integer_zero_node;
9025 p1 = c_getstr (arg1);
9026 p2 = c_getstr (arg2);
9028 if (p1 && p2)
9030 const int i = strcmp (p1, p2);
9031 if (i < 0)
9032 return integer_minus_one_node;
9033 else if (i > 0)
9034 return integer_one_node;
9035 else
9036 return integer_zero_node;
9039 /* If the second arg is "", return *(const unsigned char*)arg1. */
9040 if (p2 && *p2 == '\0')
9042 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9043 tree cst_uchar_ptr_node
9044 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9046 return fold_convert (integer_type_node,
9047 build1 (INDIRECT_REF, cst_uchar_node,
9048 fold_convert (cst_uchar_ptr_node,
9049 arg1)));
9052 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9053 if (p1 && *p1 == '\0')
9055 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9056 tree cst_uchar_ptr_node
9057 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9059 tree temp = fold_convert (integer_type_node,
9060 build1 (INDIRECT_REF, cst_uchar_node,
9061 fold_convert (cst_uchar_ptr_node,
9062 arg2)));
9063 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9066 return NULL_TREE;
9069 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9070 Return NULL_TREE if no simplification can be made. */
9072 static tree
9073 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9075 const char *p1, *p2;
9077 if (!validate_arg (arg1, POINTER_TYPE)
9078 || !validate_arg (arg2, POINTER_TYPE)
9079 || !validate_arg (len, INTEGER_TYPE))
9080 return NULL_TREE;
9082 /* If the LEN parameter is zero, return zero. */
9083 if (integer_zerop (len))
9084 return omit_two_operands (integer_type_node, integer_zero_node,
9085 arg1, arg2);
9087 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9088 if (operand_equal_p (arg1, arg2, 0))
9089 return omit_one_operand (integer_type_node, integer_zero_node, len);
9091 p1 = c_getstr (arg1);
9092 p2 = c_getstr (arg2);
9094 if (host_integerp (len, 1) && p1 && p2)
9096 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9097 if (i > 0)
9098 return integer_one_node;
9099 else if (i < 0)
9100 return integer_minus_one_node;
9101 else
9102 return integer_zero_node;
9105 /* If the second arg is "", and the length is greater than zero,
9106 return *(const unsigned char*)arg1. */
9107 if (p2 && *p2 == '\0'
9108 && TREE_CODE (len) == INTEGER_CST
9109 && tree_int_cst_sgn (len) == 1)
9111 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9112 tree cst_uchar_ptr_node
9113 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9115 return fold_convert (integer_type_node,
9116 build1 (INDIRECT_REF, cst_uchar_node,
9117 fold_convert (cst_uchar_ptr_node,
9118 arg1)));
9121 /* If the first arg is "", and the length is greater than zero,
9122 return -*(const unsigned char*)arg2. */
9123 if (p1 && *p1 == '\0'
9124 && TREE_CODE (len) == INTEGER_CST
9125 && tree_int_cst_sgn (len) == 1)
9127 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9128 tree cst_uchar_ptr_node
9129 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9131 tree temp = fold_convert (integer_type_node,
9132 build1 (INDIRECT_REF, cst_uchar_node,
9133 fold_convert (cst_uchar_ptr_node,
9134 arg2)));
9135 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9138 /* If len parameter is one, return an expression corresponding to
9139 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9140 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9142 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9143 tree cst_uchar_ptr_node
9144 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9146 tree ind1 = fold_convert (integer_type_node,
9147 build1 (INDIRECT_REF, cst_uchar_node,
9148 fold_convert (cst_uchar_ptr_node,
9149 arg1)));
9150 tree ind2 = fold_convert (integer_type_node,
9151 build1 (INDIRECT_REF, cst_uchar_node,
9152 fold_convert (cst_uchar_ptr_node,
9153 arg2)));
9154 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9157 return NULL_TREE;
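/* Editor's illustration -- not part of the original builtins.c.  A sketch of
   the len == 1 case handled above; the function name is hypothetical.  */
static int strncmp_len_one (const char *s1, const char *s2)
{
  /* strncmp (s1, s2, 1) folds to the difference of the first bytes,
     read through unsigned char as in the MINUS_EXPR built above.  */
  return *(const unsigned char *) s1 - *(const unsigned char *) s2;
}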
9160 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9161 ARG. Return NULL_TREE if no simplification can be made. */
9163 static tree
9164 fold_builtin_signbit (tree arg, tree type)
9166 tree temp;
9168 if (!validate_arg (arg, REAL_TYPE))
9169 return NULL_TREE;
9171 /* If ARG is a compile-time constant, determine the result. */
9172 if (TREE_CODE (arg) == REAL_CST
9173 && !TREE_OVERFLOW (arg))
9175 REAL_VALUE_TYPE c;
9177 c = TREE_REAL_CST (arg);
9178 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9179 return fold_convert (type, temp);
9182 /* If ARG is non-negative, the result is always zero. */
9183 if (tree_expr_nonnegative_p (arg))
9184 return omit_one_operand (type, integer_zero_node, arg);
9186 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9187 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9188 return fold_build2 (LT_EXPR, type, arg,
9189 build_real (TREE_TYPE (arg), dconst0));
9191 return NULL_TREE;
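/* Editor's illustration -- not part of the original builtins.c.  When the
   argument's format has no signed zeros, the fold above turns signbit into
   a plain comparison; a sketch with a hypothetical name:  */
static int signbit_no_signed_zeros (double x)
{
  return x < 0.0;   /* only valid when -0.0 cannot occur in the format.  */
}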
9194 /* Fold function call to builtin copysign, copysignf or copysignl with
9195 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9196 be made. */
9198 static tree
9199 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9201 tree tem;
9203 if (!validate_arg (arg1, REAL_TYPE)
9204 || !validate_arg (arg2, REAL_TYPE))
9205 return NULL_TREE;
9207 /* copysign(X,X) is X. */
9208 if (operand_equal_p (arg1, arg2, 0))
9209 return fold_convert (type, arg1);
9211 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9212 if (TREE_CODE (arg1) == REAL_CST
9213 && TREE_CODE (arg2) == REAL_CST
9214 && !TREE_OVERFLOW (arg1)
9215 && !TREE_OVERFLOW (arg2))
9217 REAL_VALUE_TYPE c1, c2;
9219 c1 = TREE_REAL_CST (arg1);
9220 c2 = TREE_REAL_CST (arg2);
9221 /* c1.sign := c2.sign. */
9222 real_copysign (&c1, &c2);
9223 return build_real (type, c1);
9226 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9227 Remember to evaluate Y for side-effects. */
9228 if (tree_expr_nonnegative_p (arg2))
9229 return omit_one_operand (type,
9230 fold_build1 (ABS_EXPR, type, arg1),
9231 arg2);
9233 /* Strip sign changing operations for the first argument. */
9234 tem = fold_strip_sign_ops (arg1);
9235 if (tem)
9236 return build_call_expr (fndecl, 2, tem, arg2);
9238 return NULL_TREE;
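/* Editor's illustration -- not part of the original builtins.c.  A sketch of
   the copysign folds above (hypothetical name):
     copysign (x, x)          -> x
     copysign (3.0, -0.0)     -> -3.0        (constant folded)
     copysign (x, y), y >= 0  -> fabs (x)    (y is still evaluated)  */
#include <math.h>

static double copysign_nonnegative_y (double x, double y)
{
  (void) y;          /* the real fold keeps y around for its side effects.  */
  return fabs (x);   /* assumes y is known to be non-negative.  */
}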
9241 /* Fold a call to builtin isascii with argument ARG. */
9243 static tree
9244 fold_builtin_isascii (tree arg)
9246 if (!validate_arg (arg, INTEGER_TYPE))
9247 return NULL_TREE;
9248 else
9250 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9251 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9252 build_int_cst (NULL_TREE,
9253 ~ (unsigned HOST_WIDE_INT) 0x7f));
9254 return fold_build2 (EQ_EXPR, integer_type_node,
9255 arg, integer_zero_node);
9259 /* Fold a call to builtin toascii with argument ARG. */
9261 static tree
9262 fold_builtin_toascii (tree arg)
9264 if (!validate_arg (arg, INTEGER_TYPE))
9265 return NULL_TREE;
9267 /* Transform toascii(c) -> (c & 0x7f). */
9268 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9269 build_int_cst (NULL_TREE, 0x7f));
9272 /* Fold a call to builtin isdigit with argument ARG. */
9274 static tree
9275 fold_builtin_isdigit (tree arg)
9277 if (!validate_arg (arg, INTEGER_TYPE))
9278 return NULL_TREE;
9279 else
9281 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9282 /* According to the C standard, isdigit is unaffected by locale.
9283 However, it definitely is affected by the target character set. */
9284 unsigned HOST_WIDE_INT target_digit0
9285 = lang_hooks.to_target_charset ('0');
9287 if (target_digit0 == 0)
9288 return NULL_TREE;
9290 arg = fold_convert (unsigned_type_node, arg);
9291 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9292 build_int_cst (unsigned_type_node, target_digit0));
9293 return fold_build2 (LE_EXPR, integer_type_node, arg,
9294 build_int_cst (unsigned_type_node, 9));
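/* Editor's illustration -- not part of the original builtins.c.  The three
   character folds above expand to simple arithmetic; hypothetical names,
   assuming an ASCII-based target character set:  */
static int isascii_expanded (int c)
{
  return (c & ~0x7f) == 0;
}

static int toascii_expanded (int c)
{
  return c & 0x7f;
}

static int isdigit_expanded (int c)
{
  return (unsigned) c - '0' <= 9;
}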
9298 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9300 static tree
9301 fold_builtin_fabs (tree arg, tree type)
9303 if (!validate_arg (arg, REAL_TYPE))
9304 return NULL_TREE;
9306 arg = fold_convert (type, arg);
9307 if (TREE_CODE (arg) == REAL_CST)
9308 return fold_abs_const (arg, type);
9309 return fold_build1 (ABS_EXPR, type, arg);
9312 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9314 static tree
9315 fold_builtin_abs (tree arg, tree type)
9317 if (!validate_arg (arg, INTEGER_TYPE))
9318 return NULL_TREE;
9320 arg = fold_convert (type, arg);
9321 if (TREE_CODE (arg) == INTEGER_CST)
9322 return fold_abs_const (arg, type);
9323 return fold_build1 (ABS_EXPR, type, arg);
9326 /* Fold a call to builtin fmin or fmax. */
9328 static tree
9329 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9331 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9333 /* Calculate the result when the argument is a constant. */
9334 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9336 if (res)
9337 return res;
9339 /* If either argument is NaN, return the other one. Avoid the
9340 transformation if we get (and honor) a signalling NaN. Using
9341 omit_one_operand() ensures we create a non-lvalue. */
9342 if (TREE_CODE (arg0) == REAL_CST
9343 && real_isnan (&TREE_REAL_CST (arg0))
9344 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9345 || ! TREE_REAL_CST (arg0).signalling))
9346 return omit_one_operand (type, arg1, arg0);
9347 if (TREE_CODE (arg1) == REAL_CST
9348 && real_isnan (&TREE_REAL_CST (arg1))
9349 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9350 || ! TREE_REAL_CST (arg1).signalling))
9351 return omit_one_operand (type, arg0, arg1);
9353 /* Transform fmin/fmax(x,x) -> x. */
9354 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9355 return omit_one_operand (type, arg0, arg1);
9357 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9358 functions to return the numeric arg if the other one is NaN.
9359 These tree codes don't honor that, so only transform if
9360 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9361 handled, so we don't have to worry about it either. */
9362 if (flag_finite_math_only)
9363 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9364 fold_convert (type, arg0),
9365 fold_convert (type, arg1));
9367 return NULL_TREE;
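/* Editor's illustration -- not part of the original builtins.c.  Why the
   MIN_EXPR/MAX_EXPR rewrite above needs -ffinite-math-only: C99 fmax must
   return the numeric operand when the other is a NaN, which a plain
   comparison does not guarantee.  Hypothetical name:  */
static double fmax_finite_only (double x, double y)
{
  /* A correct replacement for fmax (x, y) only when neither operand
     can be a NaN.  */
  return x > y ? x : y;
}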
9370 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9372 static tree
9373 fold_builtin_carg (tree arg, tree type)
9375 if (validate_arg (arg, COMPLEX_TYPE))
9377 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9379 if (atan2_fn)
9381 tree new_arg = builtin_save_expr (arg);
9382 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9383 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9384 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9388 return NULL_TREE;
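/* Editor's illustration -- not part of the original builtins.c.  The fold
   above rewrites carg (z) as atan2 (cimag (z), creal (z)); a sketch with a
   hypothetical name:  */
#include <complex.h>
#include <math.h>

static double carg_via_atan2 (double _Complex z)
{
  return atan2 (cimag (z), creal (z));
}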
9391 /* Fold a call to builtin logb/ilogb. */
9393 static tree
9394 fold_builtin_logb (tree arg, tree rettype)
9396 if (! validate_arg (arg, REAL_TYPE))
9397 return NULL_TREE;
9399 STRIP_NOPS (arg);
9401 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9403 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9405 switch (value->cl)
9407 case rvc_nan:
9408 case rvc_inf:
9409 /* If arg is Inf or NaN and we're logb, return it. */
9410 if (TREE_CODE (rettype) == REAL_TYPE)
9411 return fold_convert (rettype, arg);
9412 /* Fall through... */
9413 case rvc_zero:
9414 /* Zero may set errno and/or raise an exception for logb; also,
9415 for ilogb we don't know FP_ILOGB0. */
9416 return NULL_TREE;
9417 case rvc_normal:
9418 /* For normal numbers, proceed iff radix == 2. In GCC,
9419 normalized significands are in the range [0.5, 1.0). We
9420 want the exponent as if they were [1.0, 2.0) so get the
9421 exponent and subtract 1. */
9422 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9423 return fold_convert (rettype, build_int_cst (NULL_TREE,
9424 REAL_EXP (value)-1));
9425 break;
9429 return NULL_TREE;
9432 /* Fold a call to builtin significand, if radix == 2. */
9434 static tree
9435 fold_builtin_significand (tree arg, tree rettype)
9437 if (! validate_arg (arg, REAL_TYPE))
9438 return NULL_TREE;
9440 STRIP_NOPS (arg);
9442 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9444 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9446 switch (value->cl)
9448 case rvc_zero:
9449 case rvc_nan:
9450 case rvc_inf:
9451 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9452 return fold_convert (rettype, arg);
9453 case rvc_normal:
9454 /* For normal numbers, proceed iff radix == 2. */
9455 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9457 REAL_VALUE_TYPE result = *value;
9458 /* In GCC, normalized significands are in the range [0.5,
9459 1.0). We want them to be [1.0, 2.0) so set the
9460 exponent to 1. */
9461 SET_REAL_EXP (&result, 1);
9462 return build_real (rettype, result);
9464 break;
9468 return NULL_TREE;
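/* Editor's illustration -- not part of the original builtins.c.  For a
   radix-2 format, logb and significand split a value into exponent and
   mantissa; e.g. 12.0 == 1.5 * 2^3.  The REAL_EXP adjustments above account
   for GCC keeping significands in [0.5, 1.0) while these functions report
   the [1.0, 2.0) convention.  significand() is a GNU libm extension.  */
#include <math.h>

static void logb_significand_example (double *e, double *m)
{
  *e = logb (12.0);          /* 3.0  */
  *m = significand (12.0);   /* 1.5  */
}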
9471 /* Fold a call to builtin frexp, we can assume the base is 2. */
9473 static tree
9474 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9476 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9477 return NULL_TREE;
9479 STRIP_NOPS (arg0);
9481 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9482 return NULL_TREE;
9484 arg1 = build_fold_indirect_ref (arg1);
9486 /* Proceed if a valid pointer type was passed in. */
9487 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9489 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9490 tree frac, exp;
9492 switch (value->cl)
9494 case rvc_zero:
9495 /* For +-0, return (*exp = 0, +-0). */
9496 exp = integer_zero_node;
9497 frac = arg0;
9498 break;
9499 case rvc_nan:
9500 case rvc_inf:
9501 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9502 return omit_one_operand (rettype, arg0, arg1);
9503 case rvc_normal:
9505 /* Since the frexp function always expects base 2, and in
9506 GCC normalized significands are already in the range
9507 [0.5, 1.0), we have exactly what frexp wants. */
9508 REAL_VALUE_TYPE frac_rvt = *value;
9509 SET_REAL_EXP (&frac_rvt, 0);
9510 frac = build_real (rettype, frac_rvt);
9511 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9513 break;
9514 default:
9515 gcc_unreachable ();
9518 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9519 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9520 TREE_SIDE_EFFECTS (arg1) = 1;
9521 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9524 return NULL_TREE;
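/* Editor's illustration -- not part of the original builtins.c.  What the
   constant fold above computes, shown for 12.0 == 0.75 * 2^4 (frexp uses
   the same [0.5, 1.0) significand range as GCC):  */
#include <math.h>

static double frexp_example (int *e)
{
  return frexp (12.0, e);   /* folds to (*e = 4, 0.75).  */
}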
9527 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9528 then we can assume the base is two. If it's false, then we have to
9529 check the mode of the TYPE parameter in certain cases. */
9531 static tree
9532 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9534 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9536 STRIP_NOPS (arg0);
9537 STRIP_NOPS (arg1);
9539 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9540 if (real_zerop (arg0) || integer_zerop (arg1)
9541 || (TREE_CODE (arg0) == REAL_CST
9542 && !real_isfinite (&TREE_REAL_CST (arg0))))
9543 return omit_one_operand (type, arg0, arg1);
9545 /* If both arguments are constant, then try to evaluate it. */
9546 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9547 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9548 && host_integerp (arg1, 0))
9550 /* Bound the maximum adjustment to twice the range of the
9551 mode's valid exponents. Use abs to ensure the range is
9552 positive as a sanity check. */
9553 const long max_exp_adj = 2 *
9554 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9555 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9557 /* Get the user-requested adjustment. */
9558 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9560 /* The requested adjustment must be inside this range. This
9561 is a preliminary cap to avoid things like overflow, we
9562 may still fail to compute the result for other reasons. */
9563 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9565 REAL_VALUE_TYPE initial_result;
9567 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9569 /* Ensure we didn't overflow. */
9570 if (! real_isinf (&initial_result))
9572 const REAL_VALUE_TYPE trunc_result
9573 = real_value_truncate (TYPE_MODE (type), initial_result);
9575 /* Only proceed if the target mode can hold the
9576 resulting value. */
9577 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9578 return build_real (type, trunc_result);
9584 return NULL_TREE;
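/* Editor's illustration -- not part of the original builtins.c.  The
   constant fold above in the other direction (hypothetical name):  */
#include <math.h>

static double ldexp_constant_example (void)
{
  return ldexp (0.75, 4);   /* folds to the constant 12.0.  */
}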
9587 /* Fold a call to builtin modf. */
9589 static tree
9590 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9592 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9593 return NULL_TREE;
9595 STRIP_NOPS (arg0);
9597 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9598 return NULL_TREE;
9600 arg1 = build_fold_indirect_ref (arg1);
9602 /* Proceed if a valid pointer type was passed in. */
9603 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9605 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9606 REAL_VALUE_TYPE trunc, frac;
9608 switch (value->cl)
9610 case rvc_nan:
9611 case rvc_zero:
9612 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9613 trunc = frac = *value;
9614 break;
9615 case rvc_inf:
9616 /* For +-Inf, return (*arg1 = arg0, +-0). */
9617 frac = dconst0;
9618 frac.sign = value->sign;
9619 trunc = *value;
9620 break;
9621 case rvc_normal:
9622 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9623 real_trunc (&trunc, VOIDmode, value);
9624 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9625 /* If the original number was negative and already
9626 integral, then the fractional part is -0.0. */
9627 if (value->sign && frac.cl == rvc_zero)
9628 frac.sign = value->sign;
9629 break;
9632 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9633 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9634 build_real (rettype, trunc));
9635 TREE_SIDE_EFFECTS (arg1) = 1;
9636 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9637 build_real (rettype, frac));
9640 return NULL_TREE;
9643 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9644 ARG is the argument for the call. */
9646 static tree
9647 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9649 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9650 REAL_VALUE_TYPE r;
9652 if (!validate_arg (arg, REAL_TYPE))
9653 return NULL_TREE;
9655 switch (builtin_index)
9657 case BUILT_IN_ISINF:
9658 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9659 return omit_one_operand (type, integer_zero_node, arg);
9661 if (TREE_CODE (arg) == REAL_CST)
9663 r = TREE_REAL_CST (arg);
9664 if (real_isinf (&r))
9665 return real_compare (GT_EXPR, &r, &dconst0)
9666 ? integer_one_node : integer_minus_one_node;
9667 else
9668 return integer_zero_node;
9671 return NULL_TREE;
9673 case BUILT_IN_ISFINITE:
9674 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9675 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9676 return omit_one_operand (type, integer_one_node, arg);
9678 if (TREE_CODE (arg) == REAL_CST)
9680 r = TREE_REAL_CST (arg);
9681 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9684 return NULL_TREE;
9686 case BUILT_IN_ISNAN:
9687 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9688 return omit_one_operand (type, integer_zero_node, arg);
9690 if (TREE_CODE (arg) == REAL_CST)
9692 r = TREE_REAL_CST (arg);
9693 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9696 arg = builtin_save_expr (arg);
9697 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9699 default:
9700 gcc_unreachable ();
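/* Editor's illustration -- not part of the original builtins.c.  The
   non-constant isnan expansion above relies on a NaN being the only value
   that compares unordered with itself.  Hypothetical name:  */
static int isnan_via_self_compare (double x)
{
  return x != x;   /* mirrors the UNORDERED_EXPR (arg, arg) built above.  */
}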
9704 /* Fold a call to an unordered comparison function such as
9705 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9706 being called and ARG0 and ARG1 are the arguments for the call.
9707 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9708 the opposite of the desired result. UNORDERED_CODE is used
9709 for modes that can hold NaNs and ORDERED_CODE is used for
9710 the rest. */
9712 static tree
9713 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9714 enum tree_code unordered_code,
9715 enum tree_code ordered_code)
9717 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9718 enum tree_code code;
9719 tree type0, type1;
9720 enum tree_code code0, code1;
9721 tree cmp_type = NULL_TREE;
9723 type0 = TREE_TYPE (arg0);
9724 type1 = TREE_TYPE (arg1);
9726 code0 = TREE_CODE (type0);
9727 code1 = TREE_CODE (type1);
9729 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9730 /* Choose the wider of two real types. */
9731 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9732 ? type0 : type1;
9733 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9734 cmp_type = type0;
9735 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9736 cmp_type = type1;
9738 arg0 = fold_convert (cmp_type, arg0);
9739 arg1 = fold_convert (cmp_type, arg1);
9741 if (unordered_code == UNORDERED_EXPR)
9743 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9744 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9745 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9748 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9749 : ordered_code;
9750 return fold_build1 (TRUTH_NOT_EXPR, type,
9751 fold_build2 (code, type, arg0, arg1));
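/* Editor's illustration -- not part of the original builtins.c.  The fold
   above builds each macro as the negation of the opposite comparison so
   that quiet NaNs do not raise an exception; for isgreater the result is
   equivalent to (hypothetical name):  */
static int isgreater_expanded (double x, double y)
{
  /* False whenever either operand is a NaN, otherwise exactly x > y.  */
  return !__builtin_isunordered (x, y) && x > y;
}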
9754 /* Fold a call to built-in function FNDECL with 0 arguments.
9755 IGNORE is true if the result of the function call is ignored. This
9756 function returns NULL_TREE if no simplification was possible. */
9758 static tree
9759 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9761 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9762 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9763 switch (fcode)
9765 CASE_FLT_FN (BUILT_IN_INF):
9766 case BUILT_IN_INFD32:
9767 case BUILT_IN_INFD64:
9768 case BUILT_IN_INFD128:
9769 return fold_builtin_inf (type, true);
9771 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9772 return fold_builtin_inf (type, false);
9774 case BUILT_IN_CLASSIFY_TYPE:
9775 return fold_builtin_classify_type (NULL_TREE);
9777 default:
9778 break;
9780 return NULL_TREE;
9783 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9784 IGNORE is true if the result of the function call is ignored. This
9785 function returns NULL_TREE if no simplification was possible. */
9787 static tree
9788 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9790 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9791 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9792 switch (fcode)
9795 case BUILT_IN_CONSTANT_P:
9797 tree val = fold_builtin_constant_p (arg0);
9799 /* Gimplification will pull the CALL_EXPR for the builtin out of
9800 an if condition. When not optimizing, we'll not CSE it back.
9801 To avoid regressions in the form of link errors, return false now. */
9802 if (!val && !optimize)
9803 val = integer_zero_node;
9805 return val;
9808 case BUILT_IN_CLASSIFY_TYPE:
9809 return fold_builtin_classify_type (arg0);
9811 case BUILT_IN_STRLEN:
9812 return fold_builtin_strlen (arg0);
9814 CASE_FLT_FN (BUILT_IN_FABS):
9815 return fold_builtin_fabs (arg0, type);
9817 case BUILT_IN_ABS:
9818 case BUILT_IN_LABS:
9819 case BUILT_IN_LLABS:
9820 case BUILT_IN_IMAXABS:
9821 return fold_builtin_abs (arg0, type);
9823 CASE_FLT_FN (BUILT_IN_CONJ):
9824 if (validate_arg (arg0, COMPLEX_TYPE))
9825 return fold_build1 (CONJ_EXPR, type, arg0);
9826 break;
9828 CASE_FLT_FN (BUILT_IN_CREAL):
9829 if (validate_arg (arg0, COMPLEX_TYPE))
9830 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
9831 break;
9833 CASE_FLT_FN (BUILT_IN_CIMAG):
9834 if (validate_arg (arg0, COMPLEX_TYPE))
9835 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9836 break;
9838 CASE_FLT_FN (BUILT_IN_CCOS):
9839 CASE_FLT_FN (BUILT_IN_CCOSH):
9840 /* These functions are "even", i.e. f(x) == f(-x). */
9841 if (validate_arg (arg0, COMPLEX_TYPE))
9843 tree narg = fold_strip_sign_ops (arg0);
9844 if (narg)
9845 return build_call_expr (fndecl, 1, narg);
9847 break;
9849 CASE_FLT_FN (BUILT_IN_CABS):
9850 return fold_builtin_cabs (arg0, type, fndecl);
9852 CASE_FLT_FN (BUILT_IN_CARG):
9853 return fold_builtin_carg (arg0, type);
9855 CASE_FLT_FN (BUILT_IN_SQRT):
9856 return fold_builtin_sqrt (arg0, type);
9858 CASE_FLT_FN (BUILT_IN_CBRT):
9859 return fold_builtin_cbrt (arg0, type);
9861 CASE_FLT_FN (BUILT_IN_ASIN):
9862 if (validate_arg (arg0, REAL_TYPE))
9863 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9864 &dconstm1, &dconst1, true);
9865 break;
9867 CASE_FLT_FN (BUILT_IN_ACOS):
9868 if (validate_arg (arg0, REAL_TYPE))
9869 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9870 &dconstm1, &dconst1, true);
9871 break;
9873 CASE_FLT_FN (BUILT_IN_ATAN):
9874 if (validate_arg (arg0, REAL_TYPE))
9875 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9876 break;
9878 CASE_FLT_FN (BUILT_IN_ASINH):
9879 if (validate_arg (arg0, REAL_TYPE))
9880 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9881 break;
9883 CASE_FLT_FN (BUILT_IN_ACOSH):
9884 if (validate_arg (arg0, REAL_TYPE))
9885 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9886 &dconst1, NULL, true);
9887 break;
9889 CASE_FLT_FN (BUILT_IN_ATANH):
9890 if (validate_arg (arg0, REAL_TYPE))
9891 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9892 &dconstm1, &dconst1, false);
9893 break;
9895 CASE_FLT_FN (BUILT_IN_SIN):
9896 if (validate_arg (arg0, REAL_TYPE))
9897 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9898 break;
9900 CASE_FLT_FN (BUILT_IN_COS):
9901 return fold_builtin_cos (arg0, type, fndecl);
9902 break;
9904 CASE_FLT_FN (BUILT_IN_TAN):
9905 return fold_builtin_tan (arg0, type);
9907 CASE_FLT_FN (BUILT_IN_CEXP):
9908 return fold_builtin_cexp (arg0, type);
9910 CASE_FLT_FN (BUILT_IN_CEXPI):
9911 if (validate_arg (arg0, REAL_TYPE))
9912 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9913 break;
9915 CASE_FLT_FN (BUILT_IN_SINH):
9916 if (validate_arg (arg0, REAL_TYPE))
9917 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9918 break;
9920 CASE_FLT_FN (BUILT_IN_COSH):
9921 return fold_builtin_cosh (arg0, type, fndecl);
9923 CASE_FLT_FN (BUILT_IN_TANH):
9924 if (validate_arg (arg0, REAL_TYPE))
9925 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9926 break;
9928 CASE_FLT_FN (BUILT_IN_ERF):
9929 if (validate_arg (arg0, REAL_TYPE))
9930 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9931 break;
9933 CASE_FLT_FN (BUILT_IN_ERFC):
9934 if (validate_arg (arg0, REAL_TYPE))
9935 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9936 break;
9938 CASE_FLT_FN (BUILT_IN_TGAMMA):
9939 if (validate_arg (arg0, REAL_TYPE))
9940 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9941 break;
9943 CASE_FLT_FN (BUILT_IN_EXP):
9944 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9946 CASE_FLT_FN (BUILT_IN_EXP2):
9947 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9949 CASE_FLT_FN (BUILT_IN_EXP10):
9950 CASE_FLT_FN (BUILT_IN_POW10):
9951 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9953 CASE_FLT_FN (BUILT_IN_EXPM1):
9954 if (validate_arg (arg0, REAL_TYPE))
9955 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9956 break;
9958 CASE_FLT_FN (BUILT_IN_LOG):
9959 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9961 CASE_FLT_FN (BUILT_IN_LOG2):
9962 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9964 CASE_FLT_FN (BUILT_IN_LOG10):
9965 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9967 CASE_FLT_FN (BUILT_IN_LOG1P):
9968 if (validate_arg (arg0, REAL_TYPE))
9969 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9970 &dconstm1, NULL, false);
9971 break;
9973 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9974 CASE_FLT_FN (BUILT_IN_J0):
9975 if (validate_arg (arg0, REAL_TYPE))
9976 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9977 NULL, NULL, 0);
9978 break;
9980 CASE_FLT_FN (BUILT_IN_J1):
9981 if (validate_arg (arg0, REAL_TYPE))
9982 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9983 NULL, NULL, 0);
9984 break;
9986 CASE_FLT_FN (BUILT_IN_Y0):
9987 if (validate_arg (arg0, REAL_TYPE))
9988 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9989 &dconst0, NULL, false);
9990 break;
9992 CASE_FLT_FN (BUILT_IN_Y1):
9993 if (validate_arg (arg0, REAL_TYPE))
9994 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9995 &dconst0, NULL, false);
9996 break;
9997 #endif
9999 CASE_FLT_FN (BUILT_IN_NAN):
10000 case BUILT_IN_NAND32:
10001 case BUILT_IN_NAND64:
10002 case BUILT_IN_NAND128:
10003 return fold_builtin_nan (arg0, type, true);
10005 CASE_FLT_FN (BUILT_IN_NANS):
10006 return fold_builtin_nan (arg0, type, false);
10008 CASE_FLT_FN (BUILT_IN_FLOOR):
10009 return fold_builtin_floor (fndecl, arg0);
10011 CASE_FLT_FN (BUILT_IN_CEIL):
10012 return fold_builtin_ceil (fndecl, arg0);
10014 CASE_FLT_FN (BUILT_IN_TRUNC):
10015 return fold_builtin_trunc (fndecl, arg0);
10017 CASE_FLT_FN (BUILT_IN_ROUND):
10018 return fold_builtin_round (fndecl, arg0);
10020 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10021 CASE_FLT_FN (BUILT_IN_RINT):
10022 return fold_trunc_transparent_mathfn (fndecl, arg0);
10024 CASE_FLT_FN (BUILT_IN_LCEIL):
10025 CASE_FLT_FN (BUILT_IN_LLCEIL):
10026 CASE_FLT_FN (BUILT_IN_LFLOOR):
10027 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10028 CASE_FLT_FN (BUILT_IN_LROUND):
10029 CASE_FLT_FN (BUILT_IN_LLROUND):
10030 return fold_builtin_int_roundingfn (fndecl, arg0);
10032 CASE_FLT_FN (BUILT_IN_LRINT):
10033 CASE_FLT_FN (BUILT_IN_LLRINT):
10034 return fold_fixed_mathfn (fndecl, arg0);
10036 case BUILT_IN_BSWAP32:
10037 case BUILT_IN_BSWAP64:
10038 return fold_builtin_bswap (fndecl, arg0);
10040 CASE_INT_FN (BUILT_IN_FFS):
10041 CASE_INT_FN (BUILT_IN_CLZ):
10042 CASE_INT_FN (BUILT_IN_CTZ):
10043 CASE_INT_FN (BUILT_IN_POPCOUNT):
10044 CASE_INT_FN (BUILT_IN_PARITY):
10045 return fold_builtin_bitop (fndecl, arg0);
10047 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10048 return fold_builtin_signbit (arg0, type);
10050 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10051 return fold_builtin_significand (arg0, type);
10053 CASE_FLT_FN (BUILT_IN_ILOGB):
10054 CASE_FLT_FN (BUILT_IN_LOGB):
10055 return fold_builtin_logb (arg0, type);
10057 case BUILT_IN_ISASCII:
10058 return fold_builtin_isascii (arg0);
10060 case BUILT_IN_TOASCII:
10061 return fold_builtin_toascii (arg0);
10063 case BUILT_IN_ISDIGIT:
10064 return fold_builtin_isdigit (arg0);
10066 CASE_FLT_FN (BUILT_IN_FINITE):
10067 case BUILT_IN_FINITED32:
10068 case BUILT_IN_FINITED64:
10069 case BUILT_IN_FINITED128:
10070 case BUILT_IN_ISFINITE:
10071 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10073 CASE_FLT_FN (BUILT_IN_ISINF):
10074 case BUILT_IN_ISINFD32:
10075 case BUILT_IN_ISINFD64:
10076 case BUILT_IN_ISINFD128:
10077 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10079 CASE_FLT_FN (BUILT_IN_ISNAN):
10080 case BUILT_IN_ISNAND32:
10081 case BUILT_IN_ISNAND64:
10082 case BUILT_IN_ISNAND128:
10083 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10085 case BUILT_IN_PRINTF:
10086 case BUILT_IN_PRINTF_UNLOCKED:
10087 case BUILT_IN_VPRINTF:
10088 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10090 default:
10091 break;
10094 return NULL_TREE;
10098 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10099 IGNORE is true if the result of the function call is ignored. This
10100 function returns NULL_TREE if no simplification was possible. */
10102 static tree
10103 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10105 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10106 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10108 switch (fcode)
10110 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10111 CASE_FLT_FN (BUILT_IN_JN):
10112 if (validate_arg (arg0, INTEGER_TYPE)
10113 && validate_arg (arg1, REAL_TYPE))
10114 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10115 break;
10117 CASE_FLT_FN (BUILT_IN_YN):
10118 if (validate_arg (arg0, INTEGER_TYPE)
10119 && validate_arg (arg1, REAL_TYPE))
10120 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10121 &dconst0, false);
10122 break;
10124 CASE_FLT_FN (BUILT_IN_DREM):
10125 CASE_FLT_FN (BUILT_IN_REMAINDER):
10126 if (validate_arg (arg0, REAL_TYPE)
10127 && validate_arg(arg1, REAL_TYPE))
10128 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10129 break;
10131 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10132 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10133 if (validate_arg (arg0, REAL_TYPE)
10134 && validate_arg(arg1, POINTER_TYPE))
10135 return do_mpfr_lgamma_r (arg0, arg1, type);
10136 break;
10137 #endif
10139 CASE_FLT_FN (BUILT_IN_ATAN2):
10140 if (validate_arg (arg0, REAL_TYPE)
10141 && validate_arg(arg1, REAL_TYPE))
10142 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10143 break;
10145 CASE_FLT_FN (BUILT_IN_FDIM):
10146 if (validate_arg (arg0, REAL_TYPE)
10147 && validate_arg(arg1, REAL_TYPE))
10148 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10149 break;
10151 CASE_FLT_FN (BUILT_IN_HYPOT):
10152 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10154 CASE_FLT_FN (BUILT_IN_LDEXP):
10155 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10156 CASE_FLT_FN (BUILT_IN_SCALBN):
10157 CASE_FLT_FN (BUILT_IN_SCALBLN):
10158 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10160 CASE_FLT_FN (BUILT_IN_FREXP):
10161 return fold_builtin_frexp (arg0, arg1, type);
10163 CASE_FLT_FN (BUILT_IN_MODF):
10164 return fold_builtin_modf (arg0, arg1, type);
10166 case BUILT_IN_BZERO:
10167 return fold_builtin_bzero (arg0, arg1, ignore);
10169 case BUILT_IN_FPUTS:
10170 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10172 case BUILT_IN_FPUTS_UNLOCKED:
10173 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10175 case BUILT_IN_STRSTR:
10176 return fold_builtin_strstr (arg0, arg1, type);
10178 case BUILT_IN_STRCAT:
10179 return fold_builtin_strcat (arg0, arg1);
10181 case BUILT_IN_STRSPN:
10182 return fold_builtin_strspn (arg0, arg1);
10184 case BUILT_IN_STRCSPN:
10185 return fold_builtin_strcspn (arg0, arg1);
10187 case BUILT_IN_STRCHR:
10188 case BUILT_IN_INDEX:
10189 return fold_builtin_strchr (arg0, arg1, type);
10191 case BUILT_IN_STRRCHR:
10192 case BUILT_IN_RINDEX:
10193 return fold_builtin_strrchr (arg0, arg1, type);
10195 case BUILT_IN_STRCPY:
10196 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10198 case BUILT_IN_STRCMP:
10199 return fold_builtin_strcmp (arg0, arg1);
10201 case BUILT_IN_STRPBRK:
10202 return fold_builtin_strpbrk (arg0, arg1, type);
10204 case BUILT_IN_EXPECT:
10205 return fold_builtin_expect (arg0, arg1);
10207 CASE_FLT_FN (BUILT_IN_POW):
10208 return fold_builtin_pow (fndecl, arg0, arg1, type);
10210 CASE_FLT_FN (BUILT_IN_POWI):
10211 return fold_builtin_powi (fndecl, arg0, arg1, type);
10213 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10214 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10216 CASE_FLT_FN (BUILT_IN_FMIN):
10217 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10219 CASE_FLT_FN (BUILT_IN_FMAX):
10220 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10222 case BUILT_IN_ISGREATER:
10223 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10224 case BUILT_IN_ISGREATEREQUAL:
10225 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10226 case BUILT_IN_ISLESS:
10227 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10228 case BUILT_IN_ISLESSEQUAL:
10229 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10230 case BUILT_IN_ISLESSGREATER:
10231 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10232 case BUILT_IN_ISUNORDERED:
10233 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10234 NOP_EXPR);
10236 /* We do the folding for va_start in the expander. */
10237 case BUILT_IN_VA_START:
10238 break;
10240 case BUILT_IN_SPRINTF:
10241 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10243 case BUILT_IN_OBJECT_SIZE:
10244 return fold_builtin_object_size (arg0, arg1);
10246 case BUILT_IN_PRINTF:
10247 case BUILT_IN_PRINTF_UNLOCKED:
10248 case BUILT_IN_VPRINTF:
10249 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10251 case BUILT_IN_PRINTF_CHK:
10252 case BUILT_IN_VPRINTF_CHK:
10253 if (!validate_arg (arg0, INTEGER_TYPE)
10254 || TREE_SIDE_EFFECTS (arg0))
10255 return NULL_TREE;
10256 else
10257 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10258 break;
10260 case BUILT_IN_FPRINTF:
10261 case BUILT_IN_FPRINTF_UNLOCKED:
10262 case BUILT_IN_VFPRINTF:
10263 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10264 ignore, fcode);
10266 default:
10267 break;
10269 return NULL_TREE;
10272 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10273 and ARG2. IGNORE is true if the result of the function call is ignored.
10274 This function returns NULL_TREE if no simplification was possible. */
10276 static tree
10277 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10279 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10280 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10281 switch (fcode)
10284 CASE_FLT_FN (BUILT_IN_SINCOS):
10285 return fold_builtin_sincos (arg0, arg1, arg2);
10287 CASE_FLT_FN (BUILT_IN_FMA):
10288 if (validate_arg (arg0, REAL_TYPE)
10289 && validate_arg(arg1, REAL_TYPE)
10290 && validate_arg(arg2, REAL_TYPE))
10291 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10292 break;
10294 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10295 CASE_FLT_FN (BUILT_IN_REMQUO):
10296 if (validate_arg (arg0, REAL_TYPE)
10297 && validate_arg(arg1, REAL_TYPE)
10298 && validate_arg(arg2, POINTER_TYPE))
10299 return do_mpfr_remquo (arg0, arg1, arg2);
10300 break;
10301 #endif
10303 case BUILT_IN_MEMSET:
10304 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10306 case BUILT_IN_BCOPY:
10307 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10309 case BUILT_IN_MEMCPY:
10310 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10312 case BUILT_IN_MEMPCPY:
10313 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10315 case BUILT_IN_MEMMOVE:
10316 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10318 case BUILT_IN_STRNCAT:
10319 return fold_builtin_strncat (arg0, arg1, arg2);
10321 case BUILT_IN_STRNCPY:
10322 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10324 case BUILT_IN_STRNCMP:
10325 return fold_builtin_strncmp (arg0, arg1, arg2);
10327 case BUILT_IN_MEMCHR:
10328 return fold_builtin_memchr (arg0, arg1, arg2, type);
10330 case BUILT_IN_BCMP:
10331 case BUILT_IN_MEMCMP:
10332 return fold_builtin_memcmp (arg0, arg1, arg2);
10334 case BUILT_IN_SPRINTF:
10335 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10337 case BUILT_IN_STRCPY_CHK:
10338 case BUILT_IN_STPCPY_CHK:
10339 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10340 ignore, fcode);
10342 case BUILT_IN_STRCAT_CHK:
10343 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10345 case BUILT_IN_PRINTF_CHK:
10346 case BUILT_IN_VPRINTF_CHK:
10347 if (!validate_arg (arg0, INTEGER_TYPE)
10348 || TREE_SIDE_EFFECTS (arg0))
10349 return NULL_TREE;
10350 else
10351 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10352 break;
10354 case BUILT_IN_FPRINTF:
10355 case BUILT_IN_FPRINTF_UNLOCKED:
10356 case BUILT_IN_VFPRINTF:
10357 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10359 case BUILT_IN_FPRINTF_CHK:
10360 case BUILT_IN_VFPRINTF_CHK:
10361 if (!validate_arg (arg1, INTEGER_TYPE)
10362 || TREE_SIDE_EFFECTS (arg1))
10363 return NULL_TREE;
10364 else
10365 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10366 ignore, fcode);
10368 default:
10369 break;
10371 return NULL_TREE;
10374 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10375 ARG2, and ARG3. IGNORE is true if the result of the function call is
10376 ignored. This function returns NULL_TREE if no simplification was
10377 possible. */
10379 static tree
10380 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10381 bool ignore)
10383 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10385 switch (fcode)
10387 case BUILT_IN_MEMCPY_CHK:
10388 case BUILT_IN_MEMPCPY_CHK:
10389 case BUILT_IN_MEMMOVE_CHK:
10390 case BUILT_IN_MEMSET_CHK:
10391 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10392 NULL_TREE, ignore,
10393 DECL_FUNCTION_CODE (fndecl));
10395 case BUILT_IN_STRNCPY_CHK:
10396 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10398 case BUILT_IN_STRNCAT_CHK:
10399 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10401 case BUILT_IN_FPRINTF_CHK:
10402 case BUILT_IN_VFPRINTF_CHK:
10403 if (!validate_arg (arg1, INTEGER_TYPE)
10404 || TREE_SIDE_EFFECTS (arg1))
10405 return NULL_TREE;
10406 else
10407 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10408 ignore, fcode);
10409 break;
10411 default:
10412 break;
10414 return NULL_TREE;
10417 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10418 arguments, where NARGS <= 4. IGNORE is true if the result of the
10419 function call is ignored. This function returns NULL_TREE if no
10420 simplification was possible. Note that this only folds builtins with
10421 fixed argument patterns. Foldings that do varargs-to-varargs
10422 transformations, or that match calls with more than 4 arguments,
10423 need to be handled with fold_builtin_varargs instead. */
10425 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10427 static tree
10428 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10430 tree ret = NULL_TREE;
10432 switch (nargs)
10434 case 0:
10435 ret = fold_builtin_0 (fndecl, ignore);
10436 break;
10437 case 1:
10438 ret = fold_builtin_1 (fndecl, args[0], ignore);
10439 break;
10440 case 2:
10441 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10442 break;
10443 case 3:
10444 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10445 break;
10446 case 4:
10447 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10448 ignore);
10449 break;
10450 default:
10451 break;
10453 if (ret)
10455 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10456 TREE_NO_WARNING (ret) = 1;
10457 return ret;
10459 return NULL_TREE;
10462 /* Builtins with folding operations that operate on "..." arguments
10463 need special handling; we need to store the arguments in a convenient
10464 data structure before attempting any folding. Fortunately there are
10465 only a few builtins that fall into this category. FNDECL is the
10466 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10467 result of the function call is ignored. */
10469 static tree
10470 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10472 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10473 tree ret = NULL_TREE;
10475 switch (fcode)
10477 case BUILT_IN_SPRINTF_CHK:
10478 case BUILT_IN_VSPRINTF_CHK:
10479 ret = fold_builtin_sprintf_chk (exp, fcode);
10480 break;
10482 case BUILT_IN_SNPRINTF_CHK:
10483 case BUILT_IN_VSNPRINTF_CHK:
10484 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10486 default:
10487 break;
10489 if (ret)
10491 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10492 TREE_NO_WARNING (ret) = 1;
10493 return ret;
10495 return NULL_TREE;
10498 /* A wrapper function for builtin folding that prevents warnings for
10499 "statement without effect" and the like, caused by removing the
10500 call node earlier than the warning is generated. */
10502 tree
10503 fold_call_expr (tree exp, bool ignore)
10505 tree ret = NULL_TREE;
10506 tree fndecl = get_callee_fndecl (exp);
10507 if (fndecl
10508 && TREE_CODE (fndecl) == FUNCTION_DECL
10509 && DECL_BUILT_IN (fndecl)
10510 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10511 yet. Defer folding until we see all the arguments
10512 (after inlining). */
10513 && !CALL_EXPR_VA_ARG_PACK (exp))
10515 int nargs = call_expr_nargs (exp);
10517 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10518 instead last argument is __builtin_va_arg_pack (). Defer folding
10519 even in that case, until arguments are finalized. */
10520 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10522 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10523 if (fndecl2
10524 && TREE_CODE (fndecl2) == FUNCTION_DECL
10525 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10526 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10527 return NULL_TREE;
10530 /* FIXME: Don't use a list in this interface. */
10531 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10532 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10533 else
10535 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10537 tree *args = CALL_EXPR_ARGP (exp);
10538 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10540 if (!ret)
10541 ret = fold_builtin_varargs (fndecl, exp, ignore);
10542 if (ret)
10544 /* Propagate location information from original call to
10545 expansion of builtin. Otherwise things like
10546 maybe_emit_chk_warning, that operate on the expansion
10547 of a builtin, will use the wrong location information. */
10548 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10550 tree realret = ret;
10551 if (TREE_CODE (ret) == NOP_EXPR)
10552 realret = TREE_OPERAND (ret, 0);
10553 if (CAN_HAVE_LOCATION_P (realret)
10554 && !EXPR_HAS_LOCATION (realret))
10555 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10556 return realret;
10558 return ret;
10562 return NULL_TREE;
10565 /* Conveniently construct a function call expression. FNDECL names the
10566 function to be called and ARGLIST is a TREE_LIST of arguments. */
10568 tree
10569 build_function_call_expr (tree fndecl, tree arglist)
10571 tree fntype = TREE_TYPE (fndecl);
10572 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10573 int n = list_length (arglist);
10574 tree *argarray = (tree *) alloca (n * sizeof (tree));
10575 int i;
10577 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10578 argarray[i] = TREE_VALUE (arglist);
10579 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10582 /* Conveniently construct a function call expression. FNDECL names the
10583 function to be called, N is the number of arguments, and the "..."
10584 parameters are the argument expressions. */
10586 tree
10587 build_call_expr (tree fndecl, int n, ...)
10589 va_list ap;
10590 tree fntype = TREE_TYPE (fndecl);
10591 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10592 tree *argarray = (tree *) alloca (n * sizeof (tree));
10593 int i;
10595 va_start (ap, n);
10596 for (i = 0; i < n; i++)
10597 argarray[i] = va_arg (ap, tree);
10598 va_end (ap);
10599 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
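/* Editor's illustration -- not part of the original builtins.c.  Typical use
   of build_call_expr as seen throughout the folders in this file; s1 here is
   a hypothetical pointer-valued tree.  */
#if 0
  tree fn = implicit_built_in_decls[BUILT_IN_STRCHR];
  if (fn)
    return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, 'a'));
#endif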
10602 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10603 N arguments are passed in the array ARGARRAY. */
10605 tree
10606 fold_builtin_call_array (tree type,
10607 tree fn,
10608 int n,
10609 tree *argarray)
10611 tree ret = NULL_TREE;
10612 int i;
10613 tree exp;
10615 if (TREE_CODE (fn) == ADDR_EXPR)
10617 tree fndecl = TREE_OPERAND (fn, 0);
10618 if (TREE_CODE (fndecl) == FUNCTION_DECL
10619 && DECL_BUILT_IN (fndecl))
10621 /* If last argument is __builtin_va_arg_pack (), arguments to this
10622 function are not finalized yet. Defer folding until they are. */
10623 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10625 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10626 if (fndecl2
10627 && TREE_CODE (fndecl2) == FUNCTION_DECL
10628 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10629 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10630 return build_call_array (type, fn, n, argarray);
10632 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10634 tree arglist = NULL_TREE;
10635 for (i = n - 1; i >= 0; i--)
10636 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10637 ret = targetm.fold_builtin (fndecl, arglist, false);
10638 if (ret)
10639 return ret;
10641 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10643 /* First try the transformations that don't require consing up
10644 an exp. */
10645 ret = fold_builtin_n (fndecl, argarray, n, false);
10646 if (ret)
10647 return ret;
10650 /* If we got this far, we need to build an exp. */
10651 exp = build_call_array (type, fn, n, argarray);
10652 ret = fold_builtin_varargs (fndecl, exp, false);
10653 return ret ? ret : exp;
10657 return build_call_array (type, fn, n, argarray);
10660 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10661 along with N new arguments specified as the "..." parameters. SKIP
10662 is the number of arguments in EXP to be omitted. This function is used
10663 to do varargs-to-varargs transformations. */
10665 static tree
10666 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10668 int oldnargs = call_expr_nargs (exp);
10669 int nargs = oldnargs - skip + n;
10670 tree fntype = TREE_TYPE (fndecl);
10671 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10672 tree *buffer;
10674 if (n > 0)
10676 int i, j;
10677 va_list ap;
10679 buffer = alloca (nargs * sizeof (tree));
10680 va_start (ap, n);
10681 for (i = 0; i < n; i++)
10682 buffer[i] = va_arg (ap, tree);
10683 va_end (ap);
10684 for (j = skip; j < oldnargs; j++, i++)
10685 buffer[i] = CALL_EXPR_ARG (exp, j);
10687 else
10688 buffer = CALL_EXPR_ARGP (exp) + skip;
10690 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10693 /* Validate a single argument ARG against a tree code CODE representing
10694 a type. */
10696 static bool
10697 validate_arg (const_tree arg, enum tree_code code)
10699 if (!arg)
10700 return false;
10701 else if (code == POINTER_TYPE)
10702 return POINTER_TYPE_P (TREE_TYPE (arg));
10703 else if (code == INTEGER_TYPE)
10704 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10705 return code == TREE_CODE (TREE_TYPE (arg));
10708 /* This function validates the types of a function call argument list
10709 against a specified list of tree_codes. If the last specifier is a 0,
10710 that represents an ellipsis; otherwise the last specifier must be a
10711 VOID_TYPE. */
10713 bool
10714 validate_arglist (const_tree callexpr, ...)
10716 enum tree_code code;
10717 bool res = 0;
10718 va_list ap;
10719 const_call_expr_arg_iterator iter;
10720 const_tree arg;
10722 va_start (ap, callexpr);
10723 init_const_call_expr_arg_iterator (callexpr, &iter);
10727 code = va_arg (ap, enum tree_code);
10728 switch (code)
10730 case 0:
10731 /* This signifies an ellipsis; any further arguments are all ok. */
10732 res = true;
10733 goto end;
10734 case VOID_TYPE:
10735 /* This signifies an endlink; if no arguments remain, return
10736 true, otherwise return false. */
10737 res = !more_const_call_expr_args_p (&iter);
10738 goto end;
10739 default:
10740 /* If no parameters remain or the parameter's code does not
10741 match the specified code, return false. Otherwise continue
10742 checking any remaining arguments. */
10743 arg = next_const_call_expr_arg (&iter);
10744 if (!validate_arg (arg, code))
10745 goto end;
10746 break;
10749 while (1);
10751 /* We need gotos here since we can only have one VA_CLOSE in a
10752 function. */
10753 end: ;
10754 va_end (ap);
10756 return res;
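/* Editor's illustration -- not part of the original builtins.c.  Typical
   calls to validate_arglist following the convention documented above:
   a VOID_TYPE terminator for a fixed signature, 0 for a trailing
   ellipsis.  EXP is a hypothetical CALL_EXPR.  */
#if 0
  /* A call such as memcpy (void *, const void *, size_t).  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
                         VOID_TYPE))
    return NULL_RTX;

  /* A call such as printf (const char *, ...).  */
  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return NULL_RTX;
#endif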
10759 /* Default target-specific builtin expander that does nothing. */
10761 rtx
10762 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10763 rtx target ATTRIBUTE_UNUSED,
10764 rtx subtarget ATTRIBUTE_UNUSED,
10765 enum machine_mode mode ATTRIBUTE_UNUSED,
10766 int ignore ATTRIBUTE_UNUSED)
10768 return NULL_RTX;
10771 /* Returns true if EXP represents data that would potentially reside
10772 in a readonly section. */
10774 static bool
10775 readonly_data_expr (tree exp)
10777 STRIP_NOPS (exp);
10779 if (TREE_CODE (exp) != ADDR_EXPR)
10780 return false;
10782 exp = get_base_address (TREE_OPERAND (exp, 0));
10783 if (!exp)
10784 return false;
10786 /* Make sure we call decl_readonly_section only for trees it
10787 can handle (since it returns true for everything it doesn't
10788 understand). */
10789 if (TREE_CODE (exp) == STRING_CST
10790 || TREE_CODE (exp) == CONSTRUCTOR
10791 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10792 return decl_readonly_section (exp, 0);
10793 else
10794 return false;
10797 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10798 to the call, and TYPE is its return type.
10800 Return NULL_TREE if no simplification was possible, otherwise return the
10801 simplified form of the call as a tree.
10803 The simplified form may be a constant or other expression which
10804 computes the same value, but in a more efficient manner (including
10805 calls to other builtin functions).
10807 The call may contain arguments which need to be evaluated, but
10808 which are not useful to determine the result of the call. In
10809 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10810 COMPOUND_EXPR will be an argument which must be evaluated.
10811 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10812 COMPOUND_EXPR in the chain will contain the tree for the simplified
10813 form of the builtin function call. */
10815 static tree
10816 fold_builtin_strstr (tree s1, tree s2, tree type)
10818 if (!validate_arg (s1, POINTER_TYPE)
10819 || !validate_arg (s2, POINTER_TYPE))
10820 return NULL_TREE;
10821 else
10823 tree fn;
10824 const char *p1, *p2;
10826 p2 = c_getstr (s2);
10827 if (p2 == NULL)
10828 return NULL_TREE;
10830 p1 = c_getstr (s1);
10831 if (p1 != NULL)
10833 const char *r = strstr (p1, p2);
10834 tree tem;
10836 if (r == NULL)
10837 return build_int_cst (TREE_TYPE (s1), 0);
10839 /* Return an offset into the constant string argument. */
10840 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10841 s1, size_int (r - p1));
10842 return fold_convert (type, tem);
10845 /* The argument is const char *, and the result is char *, so we need
10846 a type conversion here to avoid a warning. */
10847 if (p2[0] == '\0')
10848 return fold_convert (type, s1);
10850 if (p2[1] != '\0')
10851 return NULL_TREE;
10853 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10854 if (!fn)
10855 return NULL_TREE;
10857 /* New argument list transforming strstr(s1, s2) to
10858 strchr(s1, s2[0]). */
10859 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
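/* Editor's illustration -- not part of the original builtins.c.  A sketch of
   the strstr folds above (hypothetical names):  */
#include <string.h>

static char *strstr_empty_needle (char *s)
{
  return s;                 /* strstr (s, "") folds to s.  */
}

static char *strstr_one_char_needle (char *s)
{
  return strchr (s, 'a');   /* strstr (s, "a") becomes strchr (s, 'a').  */
}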
10863 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10864 the call, and TYPE is its return type.
10866 Return NULL_TREE if no simplification was possible, otherwise return the
10867 simplified form of the call as a tree.
10869 The simplified form may be a constant or other expression which
10870 computes the same value, but in a more efficient manner (including
10871 calls to other builtin functions).
10873 The call may contain arguments which need to be evaluated, but
10874 which are not useful to determine the result of the call. In
10875 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10876 COMPOUND_EXPR will be an argument which must be evaluated.
10877 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10878 COMPOUND_EXPR in the chain will contain the tree for the simplified
10879 form of the builtin function call. */
10881 static tree
10882 fold_builtin_strchr (tree s1, tree s2, tree type)
10884 if (!validate_arg (s1, POINTER_TYPE)
10885 || !validate_arg (s2, INTEGER_TYPE))
10886 return NULL_TREE;
10887 else
10889 const char *p1;
10891 if (TREE_CODE (s2) != INTEGER_CST)
10892 return NULL_TREE;
10894 p1 = c_getstr (s1);
10895 if (p1 != NULL)
10897 char c;
10898 const char *r;
10899 tree tem;
10901 if (target_char_cast (s2, &c))
10902 return NULL_TREE;
10904 r = strchr (p1, c);
10906 if (r == NULL)
10907 return build_int_cst (TREE_TYPE (s1), 0);
10909 /* Return an offset into the constant string argument. */
10910 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10911 s1, size_int (r - p1));
10912 return fold_convert (type, tem);
10914 return NULL_TREE;
10918 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10919 the call, and TYPE is its return type.
10921 Return NULL_TREE if no simplification was possible, otherwise return the
10922 simplified form of the call as a tree.
10924 The simplified form may be a constant or other expression which
10925 computes the same value, but in a more efficient manner (including
10926 calls to other builtin functions).
10928 The call may contain arguments which need to be evaluated, but
10929 which are not useful to determine the result of the call. In
10930 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10931 COMPOUND_EXPR will be an argument which must be evaluated.
10932 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10933 COMPOUND_EXPR in the chain will contain the tree for the simplified
10934 form of the builtin function call. */
10936 static tree
10937 fold_builtin_strrchr (tree s1, tree s2, tree type)
10939 if (!validate_arg (s1, POINTER_TYPE)
10940 || !validate_arg (s2, INTEGER_TYPE))
10941 return NULL_TREE;
10942 else
10944 tree fn;
10945 const char *p1;
10947 if (TREE_CODE (s2) != INTEGER_CST)
10948 return NULL_TREE;
10950 p1 = c_getstr (s1);
10951 if (p1 != NULL)
10953 char c;
10954 const char *r;
10955 tree tem;
10957 if (target_char_cast (s2, &c))
10958 return NULL_TREE;
10960 r = strrchr (p1, c);
10962 if (r == NULL)
10963 return build_int_cst (TREE_TYPE (s1), 0);
10965 /* Return an offset into the constant string argument. */
10966 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10967 s1, size_int (r - p1));
10968 return fold_convert (type, tem);
10971 if (! integer_zerop (s2))
10972 return NULL_TREE;
10974 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10975 if (!fn)
10976 return NULL_TREE;
10978 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10979 return build_call_expr (fn, 2, s1, s2);
10983 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10984 to the call, and TYPE is its return type.
10986 Return NULL_TREE if no simplification was possible, otherwise return the
10987 simplified form of the call as a tree.
10989 The simplified form may be a constant or other expression which
10990 computes the same value, but in a more efficient manner (including
10991 calls to other builtin functions).
10993 The call may contain arguments which need to be evaluated, but
10994 which are not useful to determine the result of the call. In
10995 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10996 COMPOUND_EXPR will be an argument which must be evaluated.
10997 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10998 COMPOUND_EXPR in the chain will contain the tree for the simplified
10999 form of the builtin function call. */
11001 static tree
11002 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11004 if (!validate_arg (s1, POINTER_TYPE)
11005 || !validate_arg (s2, POINTER_TYPE))
11006 return NULL_TREE;
11007 else
11009 tree fn;
11010 const char *p1, *p2;
11012 p2 = c_getstr (s2);
11013 if (p2 == NULL)
11014 return NULL_TREE;
11016 p1 = c_getstr (s1);
11017 if (p1 != NULL)
11019 const char *r = strpbrk (p1, p2);
11020 tree tem;
11022 if (r == NULL)
11023 return build_int_cst (TREE_TYPE (s1), 0);
11025 /* Return an offset into the constant string argument. */
11026 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11027 s1, size_int (r - p1));
11028 return fold_convert (type, tem);
11031 if (p2[0] == '\0')
11032 /* strpbrk(x, "") == NULL.
11033 Evaluate and ignore s1 in case it had side-effects. */
11034 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11036 if (p2[1] != '\0')
11037 return NULL_TREE; /* Really call strpbrk. */
11039 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11040 if (!fn)
11041 return NULL_TREE;
11043 /* New argument list transforming strpbrk(s1, s2) to
11044 strchr(s1, s2[0]). */
11045 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
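/* Editor's illustration -- not part of the original builtins.c.  A sketch of
   the strpbrk folds above (hypothetical names):  */
#include <string.h>

static char *strpbrk_empty_accept (const char *s)
{
  (void) s;                 /* s is still evaluated by the fold.  */
  return NULL;              /* strpbrk (s, "") is always NULL.  */
}

static char *strpbrk_one_char_accept (char *s)
{
  return strchr (s, 'a');   /* strpbrk (s, "a") becomes strchr (s, 'a').  */
}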
11049 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11050 to the call.
11052 Return NULL_TREE if no simplification was possible, otherwise return the
11053 simplified form of the call as a tree.
11055 The simplified form may be a constant or other expression which
11056 computes the same value, but in a more efficient manner (including
11057 calls to other builtin functions).
11059 The call may contain arguments which need to be evaluated, but
11060 which are not useful to determine the result of the call. In
11061 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11062 COMPOUND_EXPR will be an argument which must be evaluated.
11063 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11064 COMPOUND_EXPR in the chain will contain the tree for the simplified
11065 form of the builtin function call. */
11067 static tree
11068 fold_builtin_strcat (tree dst, tree src)
11070 if (!validate_arg (dst, POINTER_TYPE)
11071 || !validate_arg (src, POINTER_TYPE))
11072 return NULL_TREE;
11073 else
11075 const char *p = c_getstr (src);
11077 /* If the string length is zero, return the dst parameter. */
11078 if (p && *p == '\0')
11079 return dst;
11081 return NULL_TREE;
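/* For illustration: strcat (dst, "") simply folds to DST, since
   appending the empty string is a no-op.  */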
11085 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11086 arguments to the call.
11088 Return NULL_TREE if no simplification was possible, otherwise return the
11089 simplified form of the call as a tree.
11091 The simplified form may be a constant or other expression which
11092 computes the same value, but in a more efficient manner (including
11093 calls to other builtin functions).
11095 The call may contain arguments which need to be evaluated, but
11096 which are not useful to determine the result of the call. In
11097 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11098 COMPOUND_EXPR will be an argument which must be evaluated.
11099 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11100 COMPOUND_EXPR in the chain will contain the tree for the simplified
11101 form of the builtin function call. */
11103 static tree
11104 fold_builtin_strncat (tree dst, tree src, tree len)
11106 if (!validate_arg (dst, POINTER_TYPE)
11107 || !validate_arg (src, POINTER_TYPE)
11108 || !validate_arg (len, INTEGER_TYPE))
11109 return NULL_TREE;
11110 else
11112 const char *p = c_getstr (src);
11114 /* If the requested length is zero, or the src parameter string
11115 length is zero, return the dst parameter. */
11116 if (integer_zerop (len) || (p && *p == '\0'))
11117 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11119 /* If the requested len is greater than or equal to the string
11120 length, call strcat. */
11121 if (TREE_CODE (len) == INTEGER_CST && p
11122 && compare_tree_int (len, strlen (p)) >= 0)
11124 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11126 /* If the replacement _DECL isn't initialized, don't do the
11127 transformation. */
11128 if (!fn)
11129 return NULL_TREE;
11131 return build_call_expr (fn, 2, dst, src);
11133 return NULL_TREE;
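/* For illustration: strncat (dst, src, 0) and strncat (dst, "", n)
   fold to DST (still evaluating the other operands), while
   strncat (dst, "ab", 4) becomes strcat (dst, "ab") because the
   constant bound covers the whole source string.  */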
11137 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11138 to the call.
11140 Return NULL_TREE if no simplification was possible, otherwise return the
11141 simplified form of the call as a tree.
11143 The simplified form may be a constant or other expression which
11144 computes the same value, but in a more efficient manner (including
11145 calls to other builtin functions).
11147 The call may contain arguments which need to be evaluated, but
11148 which are not useful to determine the result of the call. In
11149 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11150 COMPOUND_EXPR will be an argument which must be evaluated.
11151 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11152 COMPOUND_EXPR in the chain will contain the tree for the simplified
11153 form of the builtin function call. */
11155 static tree
11156 fold_builtin_strspn (tree s1, tree s2)
11158 if (!validate_arg (s1, POINTER_TYPE)
11159 || !validate_arg (s2, POINTER_TYPE))
11160 return NULL_TREE;
11161 else
11163 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11165 /* If both arguments are constants, evaluate at compile-time. */
11166 if (p1 && p2)
11168 const size_t r = strspn (p1, p2);
11169 return size_int (r);
11172 /* If either argument is "", return NULL_TREE. */
11173 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11174 /* Evaluate and ignore both arguments in case either one has
11175 side-effects. */
11176 return omit_two_operands (integer_type_node, integer_zero_node,
11177 s1, s2);
11178 return NULL_TREE;
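/* For illustration: strspn ("aab", "ab") folds to the constant 3, and
   strspn with either argument equal to "" folds to 0 while evaluating
   both operands for side effects.  */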
11182 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11183 to the call.
11185 Return NULL_TREE if no simplification was possible, otherwise return the
11186 simplified form of the call as a tree.
11188 The simplified form may be a constant or other expression which
11189 computes the same value, but in a more efficient manner (including
11190 calls to other builtin functions).
11192 The call may contain arguments which need to be evaluated, but
11193 which are not useful to determine the result of the call. In
11194 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11195 COMPOUND_EXPR will be an argument which must be evaluated.
11196 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11197 COMPOUND_EXPR in the chain will contain the tree for the simplified
11198 form of the builtin function call. */
11200 static tree
11201 fold_builtin_strcspn (tree s1, tree s2)
11203 if (!validate_arg (s1, POINTER_TYPE)
11204 || !validate_arg (s2, POINTER_TYPE))
11205 return NULL_TREE;
11206 else
11208 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11210 /* If both arguments are constants, evaluate at compile-time. */
11211 if (p1 && p2)
11213 const size_t r = strcspn (p1, p2);
11214 return size_int (r);
11217 /* If the first argument is "", return NULL_TREE. */
11218 if (p1 && *p1 == '\0')
11220 /* Evaluate and ignore argument s2 in case it has
11221 side-effects. */
11222 return omit_one_operand (integer_type_node,
11223 integer_zero_node, s2);
11226 /* If the second argument is "", return __builtin_strlen(s1). */
11227 if (p2 && *p2 == '\0')
11229 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11231 /* If the replacement _DECL isn't initialized, don't do the
11232 transformation. */
11233 if (!fn)
11234 return NULL_TREE;
11236 return build_call_expr (fn, 1, s1);
11238 return NULL_TREE;
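/* For illustration: strcspn ("abc", "c") folds to the constant 2,
   strcspn ("", s2) folds to 0, and strcspn (s1, "") becomes a call
   to strlen (s1).  */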
11242 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11243 to the call. IGNORE is true if the value returned
11244    by the builtin will be ignored.  UNLOCKED is true if this is
11245    actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
11246 the known length of the string. Return NULL_TREE if no simplification
11247 was possible. */
11249 tree
11250 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11252 /* If we're using an unlocked function, assume the other unlocked
11253 functions exist explicitly. */
11254 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11255 : implicit_built_in_decls[BUILT_IN_FPUTC];
11256 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11257 : implicit_built_in_decls[BUILT_IN_FWRITE];
11259 /* If the return value is used, don't do the transformation. */
11260 if (!ignore)
11261 return NULL_TREE;
11263 /* Verify the arguments in the original call. */
11264 if (!validate_arg (arg0, POINTER_TYPE)
11265 || !validate_arg (arg1, POINTER_TYPE))
11266 return NULL_TREE;
11268 if (! len)
11269 len = c_strlen (arg0, 0);
11271 /* Get the length of the string passed to fputs. If the length
11272 can't be determined, punt. */
11273 if (!len
11274 || TREE_CODE (len) != INTEGER_CST)
11275 return NULL_TREE;
11277 switch (compare_tree_int (len, 1))
11279 case -1: /* length is 0, delete the call entirely.  */
11280 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11282 case 0: /* length is 1, call fputc. */
11284 const char *p = c_getstr (arg0);
11286 if (p != NULL)
11288 if (fn_fputc)
11289 return build_call_expr (fn_fputc, 2,
11290 build_int_cst (NULL_TREE, p[0]), arg1);
11291 else
11292 return NULL_TREE;
11295 /* FALLTHROUGH */
11296 case 1: /* length is greater than 1, call fwrite. */
11298 /* If optimizing for size keep fputs. */
11299 if (optimize_size)
11300 return NULL_TREE;
11301 /* New argument list transforming fputs(string, stream) to
11302 fwrite(string, 1, len, stream). */
11303 if (fn_fwrite)
11304 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11305 else
11306 return NULL_TREE;
11308 default:
11309 gcc_unreachable ();
11311 return NULL_TREE;
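/* For illustration, when the result of fputs is unused: fputs ("", f)
   is deleted entirely, fputs ("x", f) becomes fputc ('x', f), and
   fputs ("long string", f) becomes fwrite ("long string", 1, 11, f)
   unless we are optimizing for size.  */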
11314 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11315 produced. False otherwise. This is done so that we don't output the error
11316 or warning twice or three times. */
11317 bool
11318 fold_builtin_next_arg (tree exp, bool va_start_p)
11320 tree fntype = TREE_TYPE (current_function_decl);
11321 int nargs = call_expr_nargs (exp);
11322 tree arg;
11324 if (TYPE_ARG_TYPES (fntype) == 0
11325 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11326 == void_type_node))
11328 error ("%<va_start%> used in function with fixed args");
11329 return true;
11332 if (va_start_p)
11334 if (va_start_p && (nargs != 2))
11336 error ("wrong number of arguments to function %<va_start%>");
11337 return true;
11339 arg = CALL_EXPR_ARG (exp, 1);
11341 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11342 when we checked the arguments and if needed issued a warning. */
11343 else
11345 if (nargs == 0)
11347 /* Evidently an out of date version of <stdarg.h>; can't validate
11348 va_start's second argument, but can still work as intended. */
11349 warning (0, "%<__builtin_next_arg%> called without an argument");
11350 return true;
11352 else if (nargs > 1)
11354 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11355 return true;
11357 arg = CALL_EXPR_ARG (exp, 0);
11360 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11361 or __builtin_next_arg (0) the first time we see it, after checking
11362 the arguments and if needed issuing a warning. */
11363 if (!integer_zerop (arg))
11365 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11367 /* Strip off all nops for the sake of the comparison. This
11368 is not quite the same as STRIP_NOPS. It does more.
11369 We must also strip off INDIRECT_EXPR for C++ reference
11370 parameters. */
11371 while (TREE_CODE (arg) == NOP_EXPR
11372 || TREE_CODE (arg) == CONVERT_EXPR
11373 || TREE_CODE (arg) == INDIRECT_REF)
11374 arg = TREE_OPERAND (arg, 0);
11375 if (arg != last_parm)
11377 /* FIXME: Sometimes with the tree optimizers we can get something
11378 that is not the last argument even though the user used the last
11379 argument.  We just warn and set the arg to be the last
11380 argument, so we will get wrong code because of
11381 it.  */
11382 warning (0, "second parameter of %<va_start%> not last named argument");
11384 /* We want to verify the second parameter just once before the tree
11385 optimizers are run and then avoid keeping it in the tree,
11386 as otherwise we could warn even for correct code like:
11387 void foo (int i, ...)
11388 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11389 if (va_start_p)
11390 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11391 else
11392 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11394 return false;
11398 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11399 ORIG may be null if this is a 2-argument call. We don't attempt to
11400 simplify calls with more than 3 arguments.
11402 Return NULL_TREE if no simplification was possible, otherwise return the
11403 simplified form of the call as a tree. If IGNORED is true, it means that
11404 the caller does not use the returned value of the function. */
11406 static tree
11407 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11409 tree call, retval;
11410 const char *fmt_str = NULL;
11412 /* Verify the required arguments in the original call. We deal with two
11413 types of sprintf() calls: 'sprintf (str, fmt)' and
11414 'sprintf (dest, "%s", orig)'. */
11415 if (!validate_arg (dest, POINTER_TYPE)
11416 || !validate_arg (fmt, POINTER_TYPE))
11417 return NULL_TREE;
11418 if (orig && !validate_arg (orig, POINTER_TYPE))
11419 return NULL_TREE;
11421 /* Check whether the format is a literal string constant. */
11422 fmt_str = c_getstr (fmt);
11423 if (fmt_str == NULL)
11424 return NULL_TREE;
11426 call = NULL_TREE;
11427 retval = NULL_TREE;
11429 if (!init_target_chars ())
11430 return NULL_TREE;
11432 /* If the format doesn't contain % args or %%, use strcpy. */
11433 if (strchr (fmt_str, target_percent) == NULL)
11435 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11437 if (!fn)
11438 return NULL_TREE;
11440 /* Don't optimize sprintf (buf, "abc", ptr++). */
11441 if (orig)
11442 return NULL_TREE;
11444 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11445 'format' is known to contain no % formats. */
11446 call = build_call_expr (fn, 2, dest, fmt);
11447 if (!ignored)
11448 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11451 /* If the format is "%s", use strcpy if the result isn't used. */
11452 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11454 tree fn;
11455 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11457 if (!fn)
11458 return NULL_TREE;
11460 /* Don't crash on sprintf (str1, "%s"). */
11461 if (!orig)
11462 return NULL_TREE;
11464 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11465 if (!ignored)
11467 retval = c_strlen (orig, 1);
11468 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11469 return NULL_TREE;
11471 call = build_call_expr (fn, 2, dest, orig);
11474 if (call && retval)
11476 retval = fold_convert
11477 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11478 retval);
11479 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11481 else
11482 return call;
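/* For illustration: sprintf (buf, "hello") becomes strcpy (buf, "hello")
   (with the constant 5 as the value if the result is used), and
   sprintf (buf, "%s", s) becomes strcpy (buf, s) when the length of S
   is a known constant or the result is ignored.  */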
11485 /* Expand a call EXP to __builtin_object_size. */
11487 static rtx
11488 expand_builtin_object_size (tree exp)
11490 tree ost;
11491 int object_size_type;
11492 tree fndecl = get_callee_fndecl (exp);
11494 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11496 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11497 exp, fndecl);
11498 expand_builtin_trap ();
11499 return const0_rtx;
11502 ost = CALL_EXPR_ARG (exp, 1);
11503 STRIP_NOPS (ost);
11505 if (TREE_CODE (ost) != INTEGER_CST
11506 || tree_int_cst_sgn (ost) < 0
11507 || compare_tree_int (ost, 3) > 0)
11509 error ("%Klast argument of %D is not integer constant between 0 and 3",
11510 exp, fndecl);
11511 expand_builtin_trap ();
11512 return const0_rtx;
11515 object_size_type = tree_low_cst (ost, 0);
11517 return object_size_type < 2 ? constm1_rtx : const0_rtx;
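/* For illustration: if __builtin_object_size survives to RTL expansion,
   the object size is unknown, so types 0 and 1 expand to (size_t) -1
   and types 2 and 3 expand to 0.  */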
11520 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11521 FCODE is the BUILT_IN_* to use.
11522 Return NULL_RTX if we failed; the caller should emit a normal call,
11523 otherwise try to get the result in TARGET, if convenient (and in
11524 mode MODE if that's convenient). */
11526 static rtx
11527 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11528 enum built_in_function fcode)
11530 tree dest, src, len, size;
11532 if (!validate_arglist (exp,
11533 POINTER_TYPE,
11534 fcode == BUILT_IN_MEMSET_CHK
11535 ? INTEGER_TYPE : POINTER_TYPE,
11536 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11537 return NULL_RTX;
11539 dest = CALL_EXPR_ARG (exp, 0);
11540 src = CALL_EXPR_ARG (exp, 1);
11541 len = CALL_EXPR_ARG (exp, 2);
11542 size = CALL_EXPR_ARG (exp, 3);
11544 if (! host_integerp (size, 1))
11545 return NULL_RTX;
11547 if (host_integerp (len, 1) || integer_all_onesp (size))
11549 tree fn;
11551 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11553 warning (0, "%Kcall to %D will always overflow destination buffer",
11554 exp, get_callee_fndecl (exp));
11555 return NULL_RTX;
11558 fn = NULL_TREE;
11559 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11560 mem{cpy,pcpy,move,set} is available. */
11561 switch (fcode)
11563 case BUILT_IN_MEMCPY_CHK:
11564 fn = built_in_decls[BUILT_IN_MEMCPY];
11565 break;
11566 case BUILT_IN_MEMPCPY_CHK:
11567 fn = built_in_decls[BUILT_IN_MEMPCPY];
11568 break;
11569 case BUILT_IN_MEMMOVE_CHK:
11570 fn = built_in_decls[BUILT_IN_MEMMOVE];
11571 break;
11572 case BUILT_IN_MEMSET_CHK:
11573 fn = built_in_decls[BUILT_IN_MEMSET];
11574 break;
11575 default:
11576 break;
11579 if (! fn)
11580 return NULL_RTX;
11582 fn = build_call_expr (fn, 3, dest, src, len);
11583 STRIP_TYPE_NOPS (fn);
11584 while (TREE_CODE (fn) == COMPOUND_EXPR)
11586 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11587 EXPAND_NORMAL);
11588 fn = TREE_OPERAND (fn, 1);
11590 if (TREE_CODE (fn) == CALL_EXPR)
11591 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11592 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11594 else if (fcode == BUILT_IN_MEMSET_CHK)
11595 return NULL_RTX;
11596 else
11598 unsigned int dest_align
11599 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11601 /* If DEST is not a pointer type, call the normal function. */
11602 if (dest_align == 0)
11603 return NULL_RTX;
11605 /* If SRC and DEST are the same (and not volatile), do nothing. */
11606 if (operand_equal_p (src, dest, 0))
11608 tree expr;
11610 if (fcode != BUILT_IN_MEMPCPY_CHK)
11612 /* Evaluate and ignore LEN in case it has side-effects. */
11613 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11614 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11617 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11618 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11621 /* __memmove_chk special case. */
11622 if (fcode == BUILT_IN_MEMMOVE_CHK)
11624 unsigned int src_align
11625 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11627 if (src_align == 0)
11628 return NULL_RTX;
11630 /* If src is categorized for a readonly section we can use
11631 normal __memcpy_chk. */
11632 if (readonly_data_expr (src))
11634 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11635 if (!fn)
11636 return NULL_RTX;
11637 fn = build_call_expr (fn, 4, dest, src, len, size);
11638 STRIP_TYPE_NOPS (fn);
11639 while (TREE_CODE (fn) == COMPOUND_EXPR)
11641 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11642 EXPAND_NORMAL);
11643 fn = TREE_OPERAND (fn, 1);
11645 if (TREE_CODE (fn) == CALL_EXPR)
11646 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11647 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11650 return NULL_RTX;
11654 /* Emit warning if a buffer overflow is detected at compile time. */
11656 static void
11657 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11659 int is_strlen = 0;
11660 tree len, size;
11662 switch (fcode)
11664 case BUILT_IN_STRCPY_CHK:
11665 case BUILT_IN_STPCPY_CHK:
11666 /* For __strcat_chk the warning will be emitted only if overflowing
11667 by at least strlen (dest) + 1 bytes. */
11668 case BUILT_IN_STRCAT_CHK:
11669 len = CALL_EXPR_ARG (exp, 1);
11670 size = CALL_EXPR_ARG (exp, 2);
11671 is_strlen = 1;
11672 break;
11673 case BUILT_IN_STRNCAT_CHK:
11674 case BUILT_IN_STRNCPY_CHK:
11675 len = CALL_EXPR_ARG (exp, 2);
11676 size = CALL_EXPR_ARG (exp, 3);
11677 break;
11678 case BUILT_IN_SNPRINTF_CHK:
11679 case BUILT_IN_VSNPRINTF_CHK:
11680 len = CALL_EXPR_ARG (exp, 1);
11681 size = CALL_EXPR_ARG (exp, 3);
11682 break;
11683 default:
11684 gcc_unreachable ();
11687 if (!len || !size)
11688 return;
11690 if (! host_integerp (size, 1) || integer_all_onesp (size))
11691 return;
11693 if (is_strlen)
11695 len = c_strlen (len, 1);
11696 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11697 return;
11699 else if (fcode == BUILT_IN_STRNCAT_CHK)
11701 tree src = CALL_EXPR_ARG (exp, 1);
11702 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11703 return;
11704 src = c_strlen (src, 1);
11705 if (! src || ! host_integerp (src, 1))
11707 warning (0, "%Kcall to %D might overflow destination buffer",
11708 exp, get_callee_fndecl (exp));
11709 return;
11711 else if (tree_int_cst_lt (src, size))
11712 return;
11714 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11715 return;
11717 warning (0, "%Kcall to %D will always overflow destination buffer",
11718 exp, get_callee_fndecl (exp));
11721 /* Emit warning if a buffer overflow is detected at compile time
11722 in __sprintf_chk/__vsprintf_chk calls. */
11724 static void
11725 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11727 tree dest, size, len, fmt, flag;
11728 const char *fmt_str;
11729 int nargs = call_expr_nargs (exp);
11731 /* Verify the required arguments in the original call. */
11733 if (nargs < 4)
11734 return;
11735 dest = CALL_EXPR_ARG (exp, 0);
11736 flag = CALL_EXPR_ARG (exp, 1);
11737 size = CALL_EXPR_ARG (exp, 2);
11738 fmt = CALL_EXPR_ARG (exp, 3);
11740 if (! host_integerp (size, 1) || integer_all_onesp (size))
11741 return;
11743 /* Check whether the format is a literal string constant. */
11744 fmt_str = c_getstr (fmt);
11745 if (fmt_str == NULL)
11746 return;
11748 if (!init_target_chars ())
11749 return;
11751 /* If the format doesn't contain % args or %%, we know its size. */
11752 if (strchr (fmt_str, target_percent) == 0)
11753 len = build_int_cstu (size_type_node, strlen (fmt_str));
11754 /* If the format is "%s" and first ... argument is a string literal,
11755 we know it too. */
11756 else if (fcode == BUILT_IN_SPRINTF_CHK
11757 && strcmp (fmt_str, target_percent_s) == 0)
11759 tree arg;
11761 if (nargs < 5)
11762 return;
11763 arg = CALL_EXPR_ARG (exp, 4);
11764 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11765 return;
11767 len = c_strlen (arg, 1);
11768 if (!len || ! host_integerp (len, 1))
11769 return;
11771 else
11772 return;
11774 if (! tree_int_cst_lt (len, size))
11776 warning (0, "%Kcall to %D will always overflow destination buffer",
11777 exp, get_callee_fndecl (exp));
11781 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11782 if possible. */
11784 tree
11785 fold_builtin_object_size (tree ptr, tree ost)
11787 tree ret = NULL_TREE;
11788 int object_size_type;
11790 if (!validate_arg (ptr, POINTER_TYPE)
11791 || !validate_arg (ost, INTEGER_TYPE))
11792 return NULL_TREE;
11794 STRIP_NOPS (ost);
11796 if (TREE_CODE (ost) != INTEGER_CST
11797 || tree_int_cst_sgn (ost) < 0
11798 || compare_tree_int (ost, 3) > 0)
11799 return NULL_TREE;
11801 object_size_type = tree_low_cst (ost, 0);
11803 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11804 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11805 and (size_t) 0 for types 2 and 3. */
11806 if (TREE_SIDE_EFFECTS (ptr))
11807 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11809 if (TREE_CODE (ptr) == ADDR_EXPR)
11810 ret = build_int_cstu (size_type_node,
11811 compute_builtin_object_size (ptr, object_size_type));
11813 else if (TREE_CODE (ptr) == SSA_NAME)
11815 unsigned HOST_WIDE_INT bytes;
11817 /* If object size is not known yet, delay folding until
11818 later. Maybe subsequent passes will help determining
11819 it. */
11820 bytes = compute_builtin_object_size (ptr, object_size_type);
11821 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11822 ? -1 : 0))
11823 ret = build_int_cstu (size_type_node, bytes);
11826 if (ret)
11828 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11829 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11830 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11831 ret = NULL_TREE;
11834 return ret;
11837 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11838 DEST, SRC, LEN, and SIZE are the arguments to the call.
11839 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11840 code of the builtin. If MAXLEN is not NULL, it is maximum length
11841 passed as third argument. */
11843 tree
11844 fold_builtin_memory_chk (tree fndecl,
11845 tree dest, tree src, tree len, tree size,
11846 tree maxlen, bool ignore,
11847 enum built_in_function fcode)
11849 tree fn;
11851 if (!validate_arg (dest, POINTER_TYPE)
11852 || !validate_arg (src,
11853 (fcode == BUILT_IN_MEMSET_CHK
11854 ? INTEGER_TYPE : POINTER_TYPE))
11855 || !validate_arg (len, INTEGER_TYPE)
11856 || !validate_arg (size, INTEGER_TYPE))
11857 return NULL_TREE;
11859 /* If SRC and DEST are the same (and not volatile), return DEST
11860 (resp. DEST+LEN for __mempcpy_chk). */
11861 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11863 if (fcode != BUILT_IN_MEMPCPY_CHK)
11864 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11865 else
11867 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11868 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11872 if (! host_integerp (size, 1))
11873 return NULL_TREE;
11875 if (! integer_all_onesp (size))
11877 if (! host_integerp (len, 1))
11879 /* If LEN is not constant, try MAXLEN too.
11880 For MAXLEN only allow optimizing into non-_ocs function
11881 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11882 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11884 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11886 /* (void) __mempcpy_chk () can be optimized into
11887 (void) __memcpy_chk (). */
11888 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11889 if (!fn)
11890 return NULL_TREE;
11892 return build_call_expr (fn, 4, dest, src, len, size);
11894 return NULL_TREE;
11897 else
11898 maxlen = len;
11900 if (tree_int_cst_lt (size, maxlen))
11901 return NULL_TREE;
11904 fn = NULL_TREE;
11905 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11906 mem{cpy,pcpy,move,set} is available. */
11907 switch (fcode)
11909 case BUILT_IN_MEMCPY_CHK:
11910 fn = built_in_decls[BUILT_IN_MEMCPY];
11911 break;
11912 case BUILT_IN_MEMPCPY_CHK:
11913 fn = built_in_decls[BUILT_IN_MEMPCPY];
11914 break;
11915 case BUILT_IN_MEMMOVE_CHK:
11916 fn = built_in_decls[BUILT_IN_MEMMOVE];
11917 break;
11918 case BUILT_IN_MEMSET_CHK:
11919 fn = built_in_decls[BUILT_IN_MEMSET];
11920 break;
11921 default:
11922 break;
11925 if (!fn)
11926 return NULL_TREE;
11928 return build_call_expr (fn, 3, dest, src, len);
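/* For illustration: __memcpy_chk (dst, src, 32, 64) folds to
   memcpy (dst, src, 32) because the constant length fits in the known
   object size, while __memcpy_chk (dst, dst, n, size) folds to DST
   (evaluating N) since copying an object onto itself does nothing.  */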
11931 /* Fold a call to the __st[rp]cpy_chk builtin.
11932 DEST, SRC, and SIZE are the arguments to the call.
11933 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11934 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11935 strings passed as second argument. */
11937 tree
11938 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
11939 tree maxlen, bool ignore,
11940 enum built_in_function fcode)
11942 tree len, fn;
11944 if (!validate_arg (dest, POINTER_TYPE)
11945 || !validate_arg (src, POINTER_TYPE)
11946 || !validate_arg (size, INTEGER_TYPE))
11947 return NULL_TREE;
11949 /* If SRC and DEST are the same (and not volatile), return DEST. */
11950 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
11951 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
11953 if (! host_integerp (size, 1))
11954 return NULL_TREE;
11956 if (! integer_all_onesp (size))
11958 len = c_strlen (src, 1);
11959 if (! len || ! host_integerp (len, 1))
11961 /* If LEN is not constant, try MAXLEN too.
11962 For MAXLEN only allow optimizing into non-_ocs function
11963 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11964 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11966 if (fcode == BUILT_IN_STPCPY_CHK)
11968 if (! ignore)
11969 return NULL_TREE;
11971 /* If return value of __stpcpy_chk is ignored,
11972 optimize into __strcpy_chk. */
11973 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
11974 if (!fn)
11975 return NULL_TREE;
11977 return build_call_expr (fn, 3, dest, src, size);
11980 if (! len || TREE_SIDE_EFFECTS (len))
11981 return NULL_TREE;
11983 /* If c_strlen returned something, but not a constant,
11984 transform __strcpy_chk into __memcpy_chk. */
11985 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11986 if (!fn)
11987 return NULL_TREE;
11989 len = size_binop (PLUS_EXPR, len, ssize_int (1));
11990 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
11991 build_call_expr (fn, 4,
11992 dest, src, len, size));
11995 else
11996 maxlen = len;
11998 if (! tree_int_cst_lt (maxlen, size))
11999 return NULL_TREE;
12002 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12003 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12004 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12005 if (!fn)
12006 return NULL_TREE;
12008 return build_call_expr (fn, 2, dest, src);
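/* For illustration: __strcpy_chk (dst, "abc", 16) folds to
   strcpy (dst, "abc") because strlen ("abc") < 16; a non-constant but
   side-effect-free source length turns the call into
   __memcpy_chk (dst, src, len + 1, size); and (void) __stpcpy_chk with
   an unknown length is demoted to __strcpy_chk.  */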
12011 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12012 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12013 length passed as third argument. */
12015 tree
12016 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12017 tree maxlen)
12019 tree fn;
12021 if (!validate_arg (dest, POINTER_TYPE)
12022 || !validate_arg (src, POINTER_TYPE)
12023 || !validate_arg (len, INTEGER_TYPE)
12024 || !validate_arg (size, INTEGER_TYPE))
12025 return NULL_TREE;
12027 if (! host_integerp (size, 1))
12028 return NULL_TREE;
12030 if (! integer_all_onesp (size))
12032 if (! host_integerp (len, 1))
12034 /* If LEN is not constant, try MAXLEN too.
12035 For MAXLEN only allow optimizing into non-_ocs function
12036 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12037 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12038 return NULL_TREE;
12040 else
12041 maxlen = len;
12043 if (tree_int_cst_lt (size, maxlen))
12044 return NULL_TREE;
12047 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12048 fn = built_in_decls[BUILT_IN_STRNCPY];
12049 if (!fn)
12050 return NULL_TREE;
12052 return build_call_expr (fn, 3, dest, src, len);
12055 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12056 are the arguments to the call. */
12058 static tree
12059 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12061 tree fn;
12062 const char *p;
12064 if (!validate_arg (dest, POINTER_TYPE)
12065 || !validate_arg (src, POINTER_TYPE)
12066 || !validate_arg (size, INTEGER_TYPE))
12067 return NULL_TREE;
12069 p = c_getstr (src);
12070 /* If the SRC parameter is "", return DEST. */
12071 if (p && *p == '\0')
12072 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12074 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12075 return NULL_TREE;
12077 /* If __builtin_strcat_chk is used, assume strcat is available. */
12078 fn = built_in_decls[BUILT_IN_STRCAT];
12079 if (!fn)
12080 return NULL_TREE;
12082 return build_call_expr (fn, 2, dest, src);
12085 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12086 LEN, and SIZE. */
12088 static tree
12089 fold_builtin_strncat_chk (tree fndecl,
12090 tree dest, tree src, tree len, tree size)
12092 tree fn;
12093 const char *p;
12095 if (!validate_arg (dest, POINTER_TYPE)
12096 || !validate_arg (src, POINTER_TYPE)
12097 || !validate_arg (len, INTEGER_TYPE)
12098 || !validate_arg (size, INTEGER_TYPE))
12099 return NULL_TREE;
12101 p = c_getstr (src);
12102 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12103 if (p && *p == '\0')
12104 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12105 else if (integer_zerop (len))
12106 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12108 if (! host_integerp (size, 1))
12109 return NULL_TREE;
12111 if (! integer_all_onesp (size))
12113 tree src_len = c_strlen (src, 1);
12114 if (src_len
12115 && host_integerp (src_len, 1)
12116 && host_integerp (len, 1)
12117 && ! tree_int_cst_lt (len, src_len))
12119 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12120 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12121 if (!fn)
12122 return NULL_TREE;
12124 return build_call_expr (fn, 3, dest, src, size);
12126 return NULL_TREE;
12129 /* If __builtin_strncat_chk is used, assume strncat is available. */
12130 fn = built_in_decls[BUILT_IN_STRNCAT];
12131 if (!fn)
12132 return NULL_TREE;
12134 return build_call_expr (fn, 3, dest, src, len);
12137 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12138 a normal call should be emitted rather than expanding the function
12139 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12141 static tree
12142 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12144 tree dest, size, len, fn, fmt, flag;
12145 const char *fmt_str;
12146 int nargs = call_expr_nargs (exp);
12148 /* Verify the required arguments in the original call. */
12149 if (nargs < 4)
12150 return NULL_TREE;
12151 dest = CALL_EXPR_ARG (exp, 0);
12152 if (!validate_arg (dest, POINTER_TYPE))
12153 return NULL_TREE;
12154 flag = CALL_EXPR_ARG (exp, 1);
12155 if (!validate_arg (flag, INTEGER_TYPE))
12156 return NULL_TREE;
12157 size = CALL_EXPR_ARG (exp, 2);
12158 if (!validate_arg (size, INTEGER_TYPE))
12159 return NULL_TREE;
12160 fmt = CALL_EXPR_ARG (exp, 3);
12161 if (!validate_arg (fmt, POINTER_TYPE))
12162 return NULL_TREE;
12164 if (! host_integerp (size, 1))
12165 return NULL_TREE;
12167 len = NULL_TREE;
12169 if (!init_target_chars ())
12170 return NULL_TREE;
12172 /* Check whether the format is a literal string constant. */
12173 fmt_str = c_getstr (fmt);
12174 if (fmt_str != NULL)
12176 /* If the format doesn't contain % args or %%, we know the size. */
12177 if (strchr (fmt_str, target_percent) == 0)
12179 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12180 len = build_int_cstu (size_type_node, strlen (fmt_str));
12182 /* If the format is "%s" and first ... argument is a string literal,
12183 we know the size too. */
12184 else if (fcode == BUILT_IN_SPRINTF_CHK
12185 && strcmp (fmt_str, target_percent_s) == 0)
12187 tree arg;
12189 if (nargs == 5)
12191 arg = CALL_EXPR_ARG (exp, 4);
12192 if (validate_arg (arg, POINTER_TYPE))
12194 len = c_strlen (arg, 1);
12195 if (! len || ! host_integerp (len, 1))
12196 len = NULL_TREE;
12202 if (! integer_all_onesp (size))
12204 if (! len || ! tree_int_cst_lt (len, size))
12205 return NULL_TREE;
12208 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12209 or if format doesn't contain % chars or is "%s". */
12210 if (! integer_zerop (flag))
12212 if (fmt_str == NULL)
12213 return NULL_TREE;
12214 if (strchr (fmt_str, target_percent) != NULL
12215 && strcmp (fmt_str, target_percent_s))
12216 return NULL_TREE;
12219 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12220 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12221 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12222 if (!fn)
12223 return NULL_TREE;
12225 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12228 /* Fold a call EXP to __{,v}snprintf_chk.  Return NULL_TREE if
12229 a normal call should be emitted rather than expanding the function
12230 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12231 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12232 passed as second argument. */
12234 tree
12235 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12236 enum built_in_function fcode)
12238 tree dest, size, len, fn, fmt, flag;
12239 const char *fmt_str;
12241 /* Verify the required arguments in the original call. */
12242 if (call_expr_nargs (exp) < 5)
12243 return NULL_TREE;
12244 dest = CALL_EXPR_ARG (exp, 0);
12245 if (!validate_arg (dest, POINTER_TYPE))
12246 return NULL_TREE;
12247 len = CALL_EXPR_ARG (exp, 1);
12248 if (!validate_arg (len, INTEGER_TYPE))
12249 return NULL_TREE;
12250 flag = CALL_EXPR_ARG (exp, 2);
12251 if (!validate_arg (flag, INTEGER_TYPE))
12252 return NULL_TREE;
12253 size = CALL_EXPR_ARG (exp, 3);
12254 if (!validate_arg (size, INTEGER_TYPE))
12255 return NULL_TREE;
12256 fmt = CALL_EXPR_ARG (exp, 4);
12257 if (!validate_arg (fmt, POINTER_TYPE))
12258 return NULL_TREE;
12260 if (! host_integerp (size, 1))
12261 return NULL_TREE;
12263 if (! integer_all_onesp (size))
12265 if (! host_integerp (len, 1))
12267 /* If LEN is not constant, try MAXLEN too.
12268 For MAXLEN only allow optimizing into non-_ocs function
12269 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12270 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12271 return NULL_TREE;
12273 else
12274 maxlen = len;
12276 if (tree_int_cst_lt (size, maxlen))
12277 return NULL_TREE;
12280 if (!init_target_chars ())
12281 return NULL_TREE;
12283 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12284 or if format doesn't contain % chars or is "%s". */
12285 if (! integer_zerop (flag))
12287 fmt_str = c_getstr (fmt);
12288 if (fmt_str == NULL)
12289 return NULL_TREE;
12290 if (strchr (fmt_str, target_percent) != NULL
12291 && strcmp (fmt_str, target_percent_s))
12292 return NULL_TREE;
12295 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12296 available. */
12297 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12298 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12299 if (!fn)
12300 return NULL_TREE;
12302 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12305 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12306 FMT and ARG are the arguments to the call; we don't fold cases with
12307 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12309 Return NULL_TREE if no simplification was possible, otherwise return the
12310 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12311 code of the function to be simplified. */
12313 static tree
12314 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12315 enum built_in_function fcode)
12317 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12318 const char *fmt_str = NULL;
12320 /* If the return value is used, don't do the transformation. */
12321 if (! ignore)
12322 return NULL_TREE;
12324 /* Verify the required arguments in the original call. */
12325 if (!validate_arg (fmt, POINTER_TYPE))
12326 return NULL_TREE;
12328 /* Check whether the format is a literal string constant. */
12329 fmt_str = c_getstr (fmt);
12330 if (fmt_str == NULL)
12331 return NULL_TREE;
12333 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12335 /* If we're using an unlocked function, assume the other
12336 unlocked functions exist explicitly. */
12337 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12338 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12340 else
12342 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12343 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12346 if (!init_target_chars ())
12347 return NULL_TREE;
12349 if (strcmp (fmt_str, target_percent_s) == 0
12350 || strchr (fmt_str, target_percent) == NULL)
12352 const char *str;
12354 if (strcmp (fmt_str, target_percent_s) == 0)
12356 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12357 return NULL_TREE;
12359 if (!arg || !validate_arg (arg, POINTER_TYPE))
12360 return NULL_TREE;
12362 str = c_getstr (arg);
12363 if (str == NULL)
12364 return NULL_TREE;
12366 else
12368 /* The format specifier doesn't contain any '%' characters. */
12369 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12370 && arg)
12371 return NULL_TREE;
12372 str = fmt_str;
12375 /* If the string was "", printf does nothing. */
12376 if (str[0] == '\0')
12377 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12379 /* If the string has length of 1, call putchar. */
12380 if (str[1] == '\0')
12382 /* Given printf ("c") (where c is any one character),
12383 convert "c"[0] to an int and pass that to the replacement
12384 function. */
12385 newarg = build_int_cst (NULL_TREE, str[0]);
12386 if (fn_putchar)
12387 call = build_call_expr (fn_putchar, 1, newarg);
12389 else
12391 /* If the string was "string\n", call puts("string"). */
12392 size_t len = strlen (str);
12393 if ((unsigned char)str[len - 1] == target_newline)
12395 /* Create a NUL-terminated string that's one char shorter
12396 than the original, stripping off the trailing '\n'. */
12397 char *newstr = alloca (len);
12398 memcpy (newstr, str, len - 1);
12399 newstr[len - 1] = 0;
12401 newarg = build_string_literal (len, newstr);
12402 if (fn_puts)
12403 call = build_call_expr (fn_puts, 1, newarg);
12405 else
12406 /* We'd like to arrange to call fputs(string,stdout) here,
12407 but we need stdout and don't have a way to get it yet. */
12408 return NULL_TREE;
12412 /* The other optimizations can be done only on the non-va_list variants. */
12413 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12414 return NULL_TREE;
12416 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12417 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12419 if (!arg || !validate_arg (arg, POINTER_TYPE))
12420 return NULL_TREE;
12421 if (fn_puts)
12422 call = build_call_expr (fn_puts, 1, arg);
12425 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12426 else if (strcmp (fmt_str, target_percent_c) == 0)
12428 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12429 return NULL_TREE;
12430 if (fn_putchar)
12431 call = build_call_expr (fn_putchar, 1, arg);
12434 if (!call)
12435 return NULL_TREE;
12437 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
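/* For illustration, when the result of printf is unused:
   printf ("") is deleted, printf ("x") becomes putchar ('x'),
   printf ("hello\n") becomes puts ("hello"), printf ("%s\n", s)
   becomes puts (s), and printf ("%c", c) becomes putchar (c).  */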
12440 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12441 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12442 more than 3 arguments, and ARG may be null in the 2-argument case.
12444 Return NULL_TREE if no simplification was possible, otherwise return the
12445 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12446 code of the function to be simplified. */
12448 static tree
12449 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12450 enum built_in_function fcode)
12452 tree fn_fputc, fn_fputs, call = NULL_TREE;
12453 const char *fmt_str = NULL;
12455 /* If the return value is used, don't do the transformation. */
12456 if (! ignore)
12457 return NULL_TREE;
12459 /* Verify the required arguments in the original call. */
12460 if (!validate_arg (fp, POINTER_TYPE))
12461 return NULL_TREE;
12462 if (!validate_arg (fmt, POINTER_TYPE))
12463 return NULL_TREE;
12465 /* Check whether the format is a literal string constant. */
12466 fmt_str = c_getstr (fmt);
12467 if (fmt_str == NULL)
12468 return NULL_TREE;
12470 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12472 /* If we're using an unlocked function, assume the other
12473 unlocked functions exist explicitly. */
12474 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12475 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12477 else
12479 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12480 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12483 if (!init_target_chars ())
12484 return NULL_TREE;
12486 /* If the format doesn't contain % args or %%, use strcpy. */
12487 if (strchr (fmt_str, target_percent) == NULL)
12489 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12490 && arg)
12491 return NULL_TREE;
12493 /* If the format specifier was "", fprintf does nothing. */
12494 if (fmt_str[0] == '\0')
12496 /* If FP has side-effects, just wait until gimplification is
12497 done. */
12498 if (TREE_SIDE_EFFECTS (fp))
12499 return NULL_TREE;
12501 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12504 /* When "string" doesn't contain %, replace all cases of
12505 fprintf (fp, string) with fputs (string, fp). The fputs
12506 builtin will take care of special cases like length == 1. */
12507 if (fn_fputs)
12508 call = build_call_expr (fn_fputs, 2, fmt, fp);
12511 /* The other optimizations can be done only on the non-va_list variants. */
12512 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12513 return NULL_TREE;
12515 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12516 else if (strcmp (fmt_str, target_percent_s) == 0)
12518 if (!arg || !validate_arg (arg, POINTER_TYPE))
12519 return NULL_TREE;
12520 if (fn_fputs)
12521 call = build_call_expr (fn_fputs, 2, arg, fp);
12524 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12525 else if (strcmp (fmt_str, target_percent_c) == 0)
12527 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12528 return NULL_TREE;
12529 if (fn_fputc)
12530 call = build_call_expr (fn_fputc, 2, arg, fp);
12533 if (!call)
12534 return NULL_TREE;
12535 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
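/* For illustration, when the result of fprintf is unused:
   fprintf (fp, "") folds to 0 once FP is known to be free of
   side effects, fprintf (fp, "hello") becomes fputs ("hello", fp),
   fprintf (fp, "%s", s) becomes fputs (s, fp), and
   fprintf (fp, "%c", c) becomes fputc (c, fp).  */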
12538 /* Initialize format string characters in the target charset. */
12540 static bool
12541 init_target_chars (void)
12543 static bool init;
12544 if (!init)
12546 target_newline = lang_hooks.to_target_charset ('\n');
12547 target_percent = lang_hooks.to_target_charset ('%');
12548 target_c = lang_hooks.to_target_charset ('c');
12549 target_s = lang_hooks.to_target_charset ('s');
12550 if (target_newline == 0 || target_percent == 0 || target_c == 0
12551 || target_s == 0)
12552 return false;
12554 target_percent_c[0] = target_percent;
12555 target_percent_c[1] = target_c;
12556 target_percent_c[2] = '\0';
12558 target_percent_s[0] = target_percent;
12559 target_percent_s[1] = target_s;
12560 target_percent_s[2] = '\0';
12562 target_percent_s_newline[0] = target_percent;
12563 target_percent_s_newline[1] = target_s;
12564 target_percent_s_newline[2] = target_newline;
12565 target_percent_s_newline[3] = '\0';
12567 init = true;
12569 return true;
12572 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12573 and no overflow/underflow occurred. INEXACT is true if M was not
12574 exactly calculated. TYPE is the tree type for the result. This
12575 function assumes that you cleared the MPFR flags and then
12576 calculated M to see if anything subsequently set a flag prior to
12577 entering this function. Return NULL_TREE if any checks fail. */
12579 static tree
12580 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12582 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12583 overflow/underflow occurred. If -frounding-math, proceed iff the
12584 result of calling FUNC was exact. */
12585 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12586 && (!flag_rounding_math || !inexact))
12588 REAL_VALUE_TYPE rr;
12590 real_from_mpfr (&rr, m, type, GMP_RNDN);
12591 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12592 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12593 but the mpfr_t is not, then we underflowed in the
12594 conversion. */
12595 if (real_isfinite (&rr)
12596 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12598 REAL_VALUE_TYPE rmode;
12600 real_convert (&rmode, TYPE_MODE (type), &rr);
12601 /* Proceed iff the specified mode can hold the value. */
12602 if (real_identical (&rmode, &rr))
12603 return build_real (type, rmode);
12606 return NULL_TREE;
12609 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12610 FUNC on it and return the resulting value as a tree with type TYPE.
12611 If MIN and/or MAX are not NULL, then the supplied ARG must be
12612 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12613 acceptable values, otherwise they are not. The mpfr precision is
12614 set to the precision of TYPE. We assume that function FUNC returns
12615 zero if the result could be calculated exactly within the requested
12616 precision. */
12618 static tree
12619 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12620 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12621 bool inclusive)
12623 tree result = NULL_TREE;
12625 STRIP_NOPS (arg);
12627 /* To proceed, MPFR must exactly represent the target floating point
12628 format, which only happens when the target base equals two. */
12629 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12630 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12632 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12634 if (real_isfinite (ra)
12635 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12636 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12638 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12639 int inexact;
12640 mpfr_t m;
12642 mpfr_init2 (m, prec);
12643 mpfr_from_real (m, ra, GMP_RNDN);
12644 mpfr_clear_flags ();
12645 inexact = func (m, m, GMP_RNDN);
12646 result = do_mpfr_ckconv (m, type, inexact);
12647 mpfr_clear (m);
12651 return result;
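/* For illustration: callers elsewhere in this file use do_mpfr_arg1 to
   fold things like sin (1.0) into a REAL_CST computed with MPFR at the
   precision of the result type, but only when the argument is a finite
   constant within the requested bounds.  */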
12654 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12655 FUNC on it and return the resulting value as a tree with type TYPE.
12656 The mpfr precision is set to the precision of TYPE. We assume that
12657 function FUNC returns zero if the result could be calculated
12658 exactly within the requested precision. */
12660 static tree
12661 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12662 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12664 tree result = NULL_TREE;
12666 STRIP_NOPS (arg1);
12667 STRIP_NOPS (arg2);
12669 /* To proceed, MPFR must exactly represent the target floating point
12670 format, which only happens when the target base equals two. */
12671 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12672 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12673 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12675 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12676 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12678 if (real_isfinite (ra1) && real_isfinite (ra2))
12680 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12681 int inexact;
12682 mpfr_t m1, m2;
12684 mpfr_inits2 (prec, m1, m2, NULL);
12685 mpfr_from_real (m1, ra1, GMP_RNDN);
12686 mpfr_from_real (m2, ra2, GMP_RNDN);
12687 mpfr_clear_flags ();
12688 inexact = func (m1, m1, m2, GMP_RNDN);
12689 result = do_mpfr_ckconv (m1, type, inexact);
12690 mpfr_clears (m1, m2, NULL);
12694 return result;
12697 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12698 FUNC on it and return the resulting value as a tree with type TYPE.
12699 The mpfr precision is set to the precision of TYPE. We assume that
12700 function FUNC returns zero if the result could be calculated
12701 exactly within the requested precision. */
12703 static tree
12704 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12705 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12707 tree result = NULL_TREE;
12709 STRIP_NOPS (arg1);
12710 STRIP_NOPS (arg2);
12711 STRIP_NOPS (arg3);
12713 /* To proceed, MPFR must exactly represent the target floating point
12714 format, which only happens when the target base equals two. */
12715 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12716 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12717 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12718 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12720 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12721 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12722 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12724 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12726 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12727 int inexact;
12728 mpfr_t m1, m2, m3;
12730 mpfr_inits2 (prec, m1, m2, m3, NULL);
12731 mpfr_from_real (m1, ra1, GMP_RNDN);
12732 mpfr_from_real (m2, ra2, GMP_RNDN);
12733 mpfr_from_real (m3, ra3, GMP_RNDN);
12734 mpfr_clear_flags ();
12735 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12736 result = do_mpfr_ckconv (m1, type, inexact);
12737 mpfr_clears (m1, m2, m3, NULL);
12741 return result;
12744 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12745 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12746 If ARG_SINP and ARG_COSP are NULL then the result is returned
12747 as a complex value.
12748 The type is taken from the type of ARG and is used for setting the
12749 precision of the calculation and results. */
12751 static tree
12752 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12754 tree const type = TREE_TYPE (arg);
12755 tree result = NULL_TREE;
12757 STRIP_NOPS (arg);
12759 /* To proceed, MPFR must exactly represent the target floating point
12760 format, which only happens when the target base equals two. */
12761 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12762 && TREE_CODE (arg) == REAL_CST
12763 && !TREE_OVERFLOW (arg))
12765 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12767 if (real_isfinite (ra))
12769 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12770 tree result_s, result_c;
12771 int inexact;
12772 mpfr_t m, ms, mc;
12774 mpfr_inits2 (prec, m, ms, mc, NULL);
12775 mpfr_from_real (m, ra, GMP_RNDN);
12776 mpfr_clear_flags ();
12777 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12778 result_s = do_mpfr_ckconv (ms, type, inexact);
12779 result_c = do_mpfr_ckconv (mc, type, inexact);
12780 mpfr_clears (m, ms, mc, NULL);
12781 if (result_s && result_c)
12783 /* If we are to return in a complex value do so. */
12784 if (!arg_sinp && !arg_cosp)
12785 return build_complex (build_complex_type (type),
12786 result_c, result_s);
12788 /* Dereference the sin/cos pointer arguments. */
12789 arg_sinp = build_fold_indirect_ref (arg_sinp);
12790 arg_cosp = build_fold_indirect_ref (arg_cosp);
12791 /* Proceed if valid pointer types were passed in.  */
12792 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12793 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12795 /* Set the values. */
12796 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12797 result_s);
12798 TREE_SIDE_EFFECTS (result_s) = 1;
12799 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12800 result_c);
12801 TREE_SIDE_EFFECTS (result_c) = 1;
12802 /* Combine the assignments into a compound expr. */
12803 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12804 result_s, result_c));
12809 return result;
12812 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12813 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12814 two-argument mpfr order N Bessel function FUNC on them and return
12815 the resulting value as a tree with type TYPE. The mpfr precision
12816 is set to the precision of TYPE. We assume that function FUNC
12817 returns zero if the result could be calculated exactly within the
12818 requested precision. */
12819 static tree
12820 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12821 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12822 const REAL_VALUE_TYPE *min, bool inclusive)
12824 tree result = NULL_TREE;
12826 STRIP_NOPS (arg1);
12827 STRIP_NOPS (arg2);
12829 /* To proceed, MPFR must exactly represent the target floating point
12830 format, which only happens when the target base equals two. */
12831 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12832 && host_integerp (arg1, 0)
12833 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12835 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12836 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12838 if (n == (long)n
12839 && real_isfinite (ra)
12840 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12842 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12843 int inexact;
12844 mpfr_t m;
12846 mpfr_init2 (m, prec);
12847 mpfr_from_real (m, ra, GMP_RNDN);
12848 mpfr_clear_flags ();
12849 inexact = func (m, n, m, GMP_RNDN);
12850 result = do_mpfr_ckconv (m, type, inexact);
12851 mpfr_clear (m);
12855 return result;
12858 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12859 the pointer *(ARG_QUO) and return the result. The type is taken
12860 from the type of ARG0 and is used for setting the precision of the
12861 calculation and results. */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, reduce the quo value modulo the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));
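
              /* Worked example, assuming a 64-bit host long and a 32-bit
                 target int (INT_TYPE_SIZE == 32): the reduction above
                 becomes integer_quo %= (long) (1UL << 31), i.e. modulo
                 2147483648, so the remaining value lies strictly between
                 -2^31 and 2^31 and always fits the target int, with the
                 sign preserved by C's truncating %.  */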

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo = fold_build2 (MODIFY_EXPR,
                                                 TREE_TYPE (arg_quo), arg_quo,
                                                 build_int_cst (NULL, integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */
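
/* For illustration (sg is a hypothetical int): a constant call such as
   lgamma_r (0.5, &sg) is expected to fold into the equivalent of
   (sg = 1, <log of sqrt(pi) constant>) as one COMPOUND_EXPR; for an
   argument like -0.5, where the gamma function is negative, sg would be
   set to -1 instead.  */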

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (NULL, sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }
  return result;
}
#endif
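
/* The MPFR_VERSION >= 2.3.0 guard above is presumably there because
   mpfr_jn, mpfr_yn, mpfr_remquo and mpfr_lgamma only became available
   around MPFR 2.3.0; with an older MPFR these foldings are simply
   compiled out.  */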