gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "tree-gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
56 #endif
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
 63 const char * built_in_names[(int) END_BUILTINS] =
 64 {
 65 #include "builtins.def"
 66 };
 67 #undef DEF_BUILTIN
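/* For illustration: each entry in builtins.def is a DEF_BUILTIN
   invocation whose first argument is the enumerator, so a (hypothetical)
   entry such as

     DEF_BUILTIN (BUILT_IN_MEMCPY, "__builtin_memcpy", ...)

   contributes the string "BUILT_IN_MEMCPY" at index BUILT_IN_MEMCPY of
   built_in_names, because the macro above stringizes only its first
   argument.  */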
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
85 #endif
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
187 enum tree_code);
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
239 #endif
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
249 return true;
250 if (strncmp (name, "__sync_", 7) == 0)
251 return true;
252 return false;
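/* For example, a call written as __builtin_memcpy (dst, src, n) or
   __sync_fetch_and_add (p, 1) satisfies this predicate, while the same
   functions reached through their plain library names do not.  */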
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
 263 unsigned int
 264 get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
270 return 0;
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
273 return 0;
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
278 while (1)
280 switch (TREE_CODE (exp))
282 case NOP_EXPR:
283 case CONVERT_EXPR:
284 exp = TREE_OPERAND (exp, 0);
285 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
286 return align;
288 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
289 align = MIN (inner, max_align);
290 break;
292 case POINTER_PLUS_EXPR:
293 /* If sum of pointer + int, restrict our maximum alignment to that
294 imposed by the integer. If not, we can't do any better than
295 ALIGN. */
296 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
297 return align;
299 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
300 & (max_align / BITS_PER_UNIT - 1))
301 != 0)
302 max_align >>= 1;
304 exp = TREE_OPERAND (exp, 0);
305 break;
307 case ADDR_EXPR:
308 /* See what we are pointing at and look at its alignment. */
309 exp = TREE_OPERAND (exp, 0);
310 inner = max_align;
311 if (handled_component_p (exp))
313 HOST_WIDE_INT bitsize, bitpos;
314 tree offset;
315 enum machine_mode mode;
316 int unsignedp, volatilep;
318 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
319 &mode, &unsignedp, &volatilep, true);
320 if (bitpos)
321 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
322 if (offset && TREE_CODE (offset) == PLUS_EXPR
323 && host_integerp (TREE_OPERAND (offset, 1), 1))
325 /* Any overflow in calculating offset_bits won't change
326 the alignment. */
327 unsigned offset_bits
328 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
329 * BITS_PER_UNIT);
331 if (offset_bits)
332 inner = MIN (inner, (offset_bits & -offset_bits));
333 offset = TREE_OPERAND (offset, 0);
335 if (offset && TREE_CODE (offset) == MULT_EXPR
336 && host_integerp (TREE_OPERAND (offset, 1), 1))
338 /* Any overflow in calculating offset_factor won't change
339 the alignment. */
340 unsigned offset_factor
341 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
342 * BITS_PER_UNIT);
344 if (offset_factor)
345 inner = MIN (inner, (offset_factor & -offset_factor));
347 else if (offset)
348 inner = MIN (inner, BITS_PER_UNIT);
350 if (DECL_P (exp))
351 align = MIN (inner, DECL_ALIGN (exp));
352 #ifdef CONSTANT_ALIGNMENT
353 else if (CONSTANT_CLASS_P (exp))
354 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
355 #endif
356 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
357 || TREE_CODE (exp) == INDIRECT_REF)
358 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
359 else
360 align = MIN (align, inner);
361 return MIN (align, max_align);
363 default:
364 return align;
369 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
370 way, because it could contain a zero byte in the middle.
371 TREE_STRING_LENGTH is the size of the character array, not the string.
373 ONLY_VALUE should be nonzero if the result is not going to be emitted
374 into the instruction stream and zero if it is going to be expanded.
375 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
376 is returned, otherwise NULL, since
377 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
378 evaluate the side-effects.
380 The value returned is of type `ssizetype'.
382 Unfortunately, string_constant can't access the values of const char
383 arrays with initializers, so neither can we do so here. */
385 tree
386 c_strlen (tree src, int only_value)
388 tree offset_node;
389 HOST_WIDE_INT offset;
390 int max;
391 const char *ptr;
393 STRIP_NOPS (src);
394 if (TREE_CODE (src) == COND_EXPR
395 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
397 tree len1, len2;
399 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
400 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
401 if (tree_int_cst_equal (len1, len2))
402 return len1;
405 if (TREE_CODE (src) == COMPOUND_EXPR
406 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
407 return c_strlen (TREE_OPERAND (src, 1), only_value);
409 src = string_constant (src, &offset_node);
410 if (src == 0)
411 return NULL_TREE;
413 max = TREE_STRING_LENGTH (src) - 1;
414 ptr = TREE_STRING_POINTER (src);
416 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
418 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
419 compute the offset to the following null if we don't know where to
420 start searching for it. */
421 int i;
423 for (i = 0; i < max; i++)
424 if (ptr[i] == 0)
425 return NULL_TREE;
427 /* We don't know the starting offset, but we do know that the string
428 has no internal zero bytes. We can assume that the offset falls
429 within the bounds of the string; otherwise, the programmer deserves
430 what he gets. Subtract the offset from the length of the string,
431 and return that. This would perhaps not be valid if we were dealing
432 with named arrays in addition to literal string constants. */
434 return size_diffop (size_int (max), offset_node);
437 /* We have a known offset into the string. Start searching there for
438 a null character if we can represent it as a single HOST_WIDE_INT. */
439 if (offset_node == 0)
440 offset = 0;
441 else if (! host_integerp (offset_node, 0))
442 offset = -1;
443 else
444 offset = tree_low_cst (offset_node, 0);
446 /* If the offset is known to be out of bounds, warn, and call strlen at
447 runtime. */
448 if (offset < 0 || offset > max)
450 warning (0, "offset outside bounds of constant string");
451 return NULL_TREE;
454 /* Use strlen to search for the first zero byte. Since any strings
455 constructed with build_string will have nulls appended, we win even
456 if we get handed something like (char[4])"abcd".
458 Since OFFSET is our starting index into the string, no further
459 calculation is needed. */
460 return ssize_int (strlen (ptr + offset));
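/* Worked example: for the C expression &"foo\0bar"[4] the STRING_CST has
   TREE_STRING_LENGTH 8 and a known offset of 4, so the strlen call above
   is applied to "bar" and the function returns ssize_int (3).  */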
463 /* Return a char pointer for a C string if it is a string constant
464 or sum of string constant and integer constant. */
466 static const char *
467 c_getstr (tree src)
469 tree offset_node;
471 src = string_constant (src, &offset_node);
472 if (src == 0)
473 return 0;
475 if (offset_node == 0)
476 return TREE_STRING_POINTER (src);
477 else if (!host_integerp (offset_node, 1)
478 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
479 return 0;
481 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
484 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
485 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
487 static rtx
488 c_readstr (const char *str, enum machine_mode mode)
490 HOST_WIDE_INT c[2];
491 HOST_WIDE_INT ch;
492 unsigned int i, j;
494 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
496 c[0] = 0;
497 c[1] = 0;
498 ch = 1;
499 for (i = 0; i < GET_MODE_SIZE (mode); i++)
501 j = i;
502 if (WORDS_BIG_ENDIAN)
503 j = GET_MODE_SIZE (mode) - i - 1;
504 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
505 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
506 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
507 j *= BITS_PER_UNIT;
508 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
510 if (ch)
511 ch = (unsigned char) str[i];
512 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
514 return immed_double_const (c[0], c[1], mode);
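/* Example: with BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN both 0, reading
   "abcd" in a 32-bit integer mode puts str[0] in the least significant
   byte and yields the constant 0x64636261; on a big-endian target the
   same call yields 0x61626364.  Once a terminating NUL has been seen,
   the remaining bytes are filled with zeros.  */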
517 /* Cast a target constant CST to target CHAR and if that value fits into
518 host char type, return zero and put that value into variable pointed to by
519 P. */
521 static int
522 target_char_cast (tree cst, char *p)
524 unsigned HOST_WIDE_INT val, hostval;
526 if (!host_integerp (cst, 1)
527 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
528 return 1;
530 val = tree_low_cst (cst, 1);
531 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
532 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
534 hostval = val;
535 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
536 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
538 if (val != hostval)
539 return 1;
541 *p = hostval;
542 return 0;
545 /* Similar to save_expr, but assumes that arbitrary code is not executed
546 in between the multiple evaluations. In particular, we assume that a
547 non-addressable local variable will not be modified. */
549 static tree
550 builtin_save_expr (tree exp)
552 if (TREE_ADDRESSABLE (exp) == 0
553 && (TREE_CODE (exp) == PARM_DECL
554 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
555 return exp;
557 return save_expr (exp);
560 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
561 times to get the address of either a higher stack frame, or a return
562 address located within it (depending on FNDECL_CODE). */
564 static rtx
565 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
567 int i;
569 #ifdef INITIAL_FRAME_ADDRESS_RTX
570 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
571 #else
572 rtx tem;
574 /* For a zero count with __builtin_return_address, we don't care what
575 frame address we return, because target-specific definitions will
576 override us. Therefore frame pointer elimination is OK, and using
577 the soft frame pointer is OK.
579 For a nonzero count, or a zero count with __builtin_frame_address,
580 we require a stable offset from the current frame pointer to the
581 previous one, so we must use the hard frame pointer, and
582 we must disable frame pointer elimination. */
583 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
584 tem = frame_pointer_rtx;
585 else
587 tem = hard_frame_pointer_rtx;
589 /* Tell reload not to eliminate the frame pointer. */
590 current_function_accesses_prior_frames = 1;
592 #endif
594 /* Some machines need special handling before we can access
595 arbitrary frames. For example, on the SPARC, we must first flush
596 all register windows to the stack. */
597 #ifdef SETUP_FRAME_ADDRESSES
598 if (count > 0)
599 SETUP_FRAME_ADDRESSES ();
600 #endif
602 /* On the SPARC, the return address is not in the frame, it is in a
603 register. There is no way to access it off of the current frame
604 pointer, but it can be accessed off the previous frame pointer by
605 reading the value from the register window save area. */
606 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
607 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
608 count--;
609 #endif
611 /* Scan back COUNT frames to the specified frame. */
612 for (i = 0; i < count; i++)
614 /* Assume the dynamic chain pointer is in the word that the
615 frame address points to, unless otherwise specified. */
616 #ifdef DYNAMIC_CHAIN_ADDRESS
617 tem = DYNAMIC_CHAIN_ADDRESS (tem);
618 #endif
619 tem = memory_address (Pmode, tem);
620 tem = gen_frame_mem (Pmode, tem);
621 tem = copy_to_reg (tem);
624 /* For __builtin_frame_address, return what we've got. But, on
625 the SPARC for example, we may have to add a bias. */
626 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
627 #ifdef FRAME_ADDR_RTX
628 return FRAME_ADDR_RTX (tem);
629 #else
630 return tem;
631 #endif
633 /* For __builtin_return_address, get the return address from that frame. */
634 #ifdef RETURN_ADDR_RTX
635 tem = RETURN_ADDR_RTX (count, tem);
636 #else
637 tem = memory_address (Pmode,
638 plus_constant (tem, GET_MODE_SIZE (Pmode)));
639 tem = gen_frame_mem (Pmode, tem);
640 #endif
641 return tem;
644 /* Alias set used for setjmp buffer. */
645 static alias_set_type setjmp_alias_set = -1;
647 /* Construct the leading half of a __builtin_setjmp call. Control will
648 return to RECEIVER_LABEL. This is also called directly by the SJLJ
649 exception handling code. */
651 void
652 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
654 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
655 rtx stack_save;
656 rtx mem;
658 if (setjmp_alias_set == -1)
659 setjmp_alias_set = new_alias_set ();
661 buf_addr = convert_memory_address (Pmode, buf_addr);
663 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
665 /* We store the frame pointer and the address of receiver_label in
666 the buffer and use the rest of it for the stack save area, which
667 is machine-dependent. */
669 mem = gen_rtx_MEM (Pmode, buf_addr);
670 set_mem_alias_set (mem, setjmp_alias_set);
671 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
673 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
674 set_mem_alias_set (mem, setjmp_alias_set);
676 emit_move_insn (validize_mem (mem),
677 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
679 stack_save = gen_rtx_MEM (sa_mode,
680 plus_constant (buf_addr,
681 2 * GET_MODE_SIZE (Pmode)));
682 set_mem_alias_set (stack_save, setjmp_alias_set);
683 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
685 /* If there is further processing to do, do it. */
686 #ifdef HAVE_builtin_setjmp_setup
687 if (HAVE_builtin_setjmp_setup)
688 emit_insn (gen_builtin_setjmp_setup (buf_addr));
689 #endif
691 /* Tell optimize_save_area_alloca that extra work is going to
692 need to go on during alloca. */
693 current_function_calls_setjmp = 1;
695 /* We have a nonlocal label. */
696 current_function_has_nonlocal_label = 1;
699 /* Construct the trailing part of a __builtin_setjmp call. This is
700 also called directly by the SJLJ exception handling code. */
702 void
703 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
705 /* Clobber the FP when we get here, so we have to make sure it's
706 marked as used by this function. */
707 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
709 /* Mark the static chain as clobbered here so life information
710 doesn't get messed up for it. */
711 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
713 /* Now put in the code to restore the frame pointer, and argument
714 pointer, if needed. */
715 #ifdef HAVE_nonlocal_goto
716 if (! HAVE_nonlocal_goto)
717 #endif
719 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
720 /* This might change the hard frame pointer in ways that aren't
721 apparent to early optimization passes, so force a clobber. */
722 emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
725 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
726 if (fixed_regs[ARG_POINTER_REGNUM])
728 #ifdef ELIMINABLE_REGS
729 size_t i;
730 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
732 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
733 if (elim_regs[i].from == ARG_POINTER_REGNUM
734 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
735 break;
737 if (i == ARRAY_SIZE (elim_regs))
738 #endif
740 /* Now restore our arg pointer from the address at which it
741 was saved in our stack frame. */
742 emit_move_insn (virtual_incoming_args_rtx,
743 copy_to_reg (get_arg_pointer_save_area ()));
746 #endif
748 #ifdef HAVE_builtin_setjmp_receiver
749 if (HAVE_builtin_setjmp_receiver)
750 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
751 else
752 #endif
753 #ifdef HAVE_nonlocal_goto_receiver
754 if (HAVE_nonlocal_goto_receiver)
755 emit_insn (gen_nonlocal_goto_receiver ());
756 else
757 #endif
758 { /* Nothing */ }
760 /* We must not allow the code we just generated to be reordered by
761 scheduling. Specifically, the update of the frame pointer must
762 happen immediately, not later. */
763 emit_insn (gen_blockage ());
766 /* __builtin_longjmp is passed a pointer to an array of five words (not
767 all will be used on all machines). It operates similarly to the C
768 library function of the same name, but is more efficient. Much of
769 the code below is copied from the handling of non-local gotos. */
771 static void
772 expand_builtin_longjmp (rtx buf_addr, rtx value)
774 rtx fp, lab, stack, insn, last;
775 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
777 if (setjmp_alias_set == -1)
778 setjmp_alias_set = new_alias_set ();
780 buf_addr = convert_memory_address (Pmode, buf_addr);
782 buf_addr = force_reg (Pmode, buf_addr);
784 /* We used to store value in static_chain_rtx, but that fails if pointers
785 are smaller than integers. We instead require that the user must pass
786 a second argument of 1, because that is what builtin_setjmp will
787 return. This also makes EH slightly more efficient, since we are no
788 longer copying around a value that we don't care about. */
789 gcc_assert (value == const1_rtx);
791 last = get_last_insn ();
792 #ifdef HAVE_builtin_longjmp
793 if (HAVE_builtin_longjmp)
794 emit_insn (gen_builtin_longjmp (buf_addr));
795 else
796 #endif
798 fp = gen_rtx_MEM (Pmode, buf_addr);
799 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
800 GET_MODE_SIZE (Pmode)));
802 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
803 2 * GET_MODE_SIZE (Pmode)));
804 set_mem_alias_set (fp, setjmp_alias_set);
805 set_mem_alias_set (lab, setjmp_alias_set);
806 set_mem_alias_set (stack, setjmp_alias_set);
808 /* Pick up FP, label, and SP from the block and jump. This code is
809 from expand_goto in stmt.c; see there for detailed comments. */
810 #ifdef HAVE_nonlocal_goto
811 if (HAVE_nonlocal_goto)
812 /* We have to pass a value to the nonlocal_goto pattern that will
813 get copied into the static_chain pointer, but it does not matter
814 what that value is, because builtin_setjmp does not use it. */
815 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
816 else
817 #endif
819 lab = copy_to_reg (lab);
821 emit_insn (gen_rtx_CLOBBER (VOIDmode,
822 gen_rtx_MEM (BLKmode,
823 gen_rtx_SCRATCH (VOIDmode))));
824 emit_insn (gen_rtx_CLOBBER (VOIDmode,
825 gen_rtx_MEM (BLKmode,
826 hard_frame_pointer_rtx)));
828 emit_move_insn (hard_frame_pointer_rtx, fp);
829 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
831 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
832 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
833 emit_indirect_jump (lab);
837 /* Search backwards and mark the jump insn as a non-local goto.
838 Note that this precludes the use of __builtin_longjmp to a
839 __builtin_setjmp target in the same function. However, we've
840 already cautioned the user that these functions are for
841 internal exception handling use only. */
842 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
844 gcc_assert (insn != last);
846 if (JUMP_P (insn))
848 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
849 REG_NOTES (insn));
850 break;
852 else if (CALL_P (insn))
853 break;
857 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
858 and the address of the save area. */
860 static rtx
861 expand_builtin_nonlocal_goto (tree exp)
863 tree t_label, t_save_area;
864 rtx r_label, r_save_area, r_fp, r_sp, insn;
866 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
867 return NULL_RTX;
869 t_label = CALL_EXPR_ARG (exp, 0);
870 t_save_area = CALL_EXPR_ARG (exp, 1);
872 r_label = expand_normal (t_label);
873 r_label = convert_memory_address (Pmode, r_label);
874 r_save_area = expand_normal (t_save_area);
875 r_save_area = convert_memory_address (Pmode, r_save_area);
876 r_fp = gen_rtx_MEM (Pmode, r_save_area);
877 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
878 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
880 current_function_has_nonlocal_goto = 1;
882 #ifdef HAVE_nonlocal_goto
883 /* ??? We no longer need to pass the static chain value, afaik. */
884 if (HAVE_nonlocal_goto)
885 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
886 else
887 #endif
889 r_label = copy_to_reg (r_label);
891 emit_insn (gen_rtx_CLOBBER (VOIDmode,
892 gen_rtx_MEM (BLKmode,
893 gen_rtx_SCRATCH (VOIDmode))));
895 emit_insn (gen_rtx_CLOBBER (VOIDmode,
896 gen_rtx_MEM (BLKmode,
897 hard_frame_pointer_rtx)));
899 /* Restore frame pointer for containing function.
900 This sets the actual hard register used for the frame pointer
901 to the location of the function's incoming static chain info.
902 The non-local goto handler will then adjust it to contain the
903 proper value and reload the argument pointer, if needed. */
904 emit_move_insn (hard_frame_pointer_rtx, r_fp);
905 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
907 /* USE of hard_frame_pointer_rtx added for consistency;
908 not clear if really needed. */
909 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
910 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
912 /* If the architecture is using a GP register, we must
913 conservatively assume that the target function makes use of it.
914 The prologue of functions with nonlocal gotos must therefore
915 initialize the GP register to the appropriate value, and we
916 must then make sure that this value is live at the point
917 of the jump. (Note that this doesn't necessarily apply
918 to targets with a nonlocal_goto pattern; they are free
919 to implement it in their own way. Note also that this is
920 a no-op if the GP register is a global invariant.) */
921 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
922 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
923 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
925 emit_indirect_jump (r_label);
928 /* Search backwards to the jump insn and mark it as a
929 non-local goto. */
930 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
932 if (JUMP_P (insn))
934 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
935 const0_rtx, REG_NOTES (insn));
936 break;
938 else if (CALL_P (insn))
939 break;
942 return const0_rtx;
945 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
946 (not all will be used on all machines) that was passed to __builtin_setjmp.
947 It updates the stack pointer in that block to correspond to the current
948 stack pointer. */
950 static void
951 expand_builtin_update_setjmp_buf (rtx buf_addr)
953 enum machine_mode sa_mode = Pmode;
954 rtx stack_save;
957 #ifdef HAVE_save_stack_nonlocal
958 if (HAVE_save_stack_nonlocal)
959 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
960 #endif
961 #ifdef STACK_SAVEAREA_MODE
962 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
963 #endif
965 stack_save
966 = gen_rtx_MEM (sa_mode,
967 memory_address
968 (sa_mode,
969 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
971 #ifdef HAVE_setjmp
972 if (HAVE_setjmp)
973 emit_insn (gen_setjmp ());
974 #endif
976 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
979 /* Expand a call to __builtin_prefetch. For a target that does not support
980 data prefetch, evaluate the memory address argument in case it has side
981 effects. */
983 static void
984 expand_builtin_prefetch (tree exp)
986 tree arg0, arg1, arg2;
987 int nargs;
988 rtx op0, op1, op2;
990 if (!validate_arglist (exp, POINTER_TYPE, 0))
991 return;
993 arg0 = CALL_EXPR_ARG (exp, 0);
995 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
996 zero (read) and argument 2 (locality) defaults to 3 (high degree of
997 locality). */
998 nargs = call_expr_nargs (exp);
999 if (nargs > 1)
1000 arg1 = CALL_EXPR_ARG (exp, 1);
1001 else
1002 arg1 = integer_zero_node;
1003 if (nargs > 2)
1004 arg2 = CALL_EXPR_ARG (exp, 2);
1005 else
1006 arg2 = build_int_cst (NULL_TREE, 3);
1008 /* Argument 0 is an address. */
1009 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1011 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1012 if (TREE_CODE (arg1) != INTEGER_CST)
1014 error ("second argument to %<__builtin_prefetch%> must be a constant");
1015 arg1 = integer_zero_node;
1017 op1 = expand_normal (arg1);
1018 /* Argument 1 must be either zero or one. */
1019 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1021 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1022 " using zero");
1023 op1 = const0_rtx;
1026 /* Argument 2 (locality) must be a compile-time constant int. */
1027 if (TREE_CODE (arg2) != INTEGER_CST)
1029 error ("third argument to %<__builtin_prefetch%> must be a constant");
1030 arg2 = integer_zero_node;
1032 op2 = expand_normal (arg2);
1033 /* Argument 2 must be 0, 1, 2, or 3. */
1034 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1036 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1037 op2 = const0_rtx;
1040 #ifdef HAVE_prefetch
1041 if (HAVE_prefetch)
1043 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1044 (op0,
1045 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1046 || (GET_MODE (op0) != Pmode))
1048 op0 = convert_memory_address (Pmode, op0);
1049 op0 = force_reg (Pmode, op0);
1051 emit_insn (gen_prefetch (op0, op1, op2));
1053 #endif
1055 /* Don't do anything with direct references to volatile memory, but
1056 generate code to handle other side effects. */
1057 if (!MEM_P (op0) && side_effects_p (op0))
1058 emit_insn (op0);
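/* Source-level usage sketch (illustrative):

     __builtin_prefetch (p);           -- rw defaults to 0, locality to 3
     __builtin_prefetch (p, 1, 1);     -- prefetch for write, low locality

   Both forms reach this expander; on a target without a prefetch
   pattern only the side effects of the address argument survive.  */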
1061 /* Get a MEM rtx for expression EXP which is the address of an operand
1062 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1063 the maximum length of the block of memory that might be accessed or
1064 NULL if unknown. */
1066 static rtx
1067 get_memory_rtx (tree exp, tree len)
1069 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1070 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1072 /* Get an expression we can use to find the attributes to assign to MEM.
1073 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1074 we can. First remove any nops. */
1075 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR)
1076 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1077 exp = TREE_OPERAND (exp, 0);
1079 if (TREE_CODE (exp) == ADDR_EXPR)
1080 exp = TREE_OPERAND (exp, 0);
1081 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1082 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1083 else
1084 exp = NULL;
1086 /* Honor attributes derived from exp, except for the alias set
1087 (as builtin stringops may alias with anything) and the size
1088 (as stringops may access multiple array elements). */
1089 if (exp)
1091 set_mem_attributes (mem, exp, 0);
1093 /* Allow the string and memory builtins to overflow from one
1094 field into another, see http://gcc.gnu.org/PR23561.
1095 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1096 memory accessed by the string or memory builtin will fit
1097 within the field. */
1098 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1100 tree mem_expr = MEM_EXPR (mem);
1101 HOST_WIDE_INT offset = -1, length = -1;
1102 tree inner = exp;
1104 while (TREE_CODE (inner) == ARRAY_REF
1105 || TREE_CODE (inner) == NOP_EXPR
1106 || TREE_CODE (inner) == CONVERT_EXPR
1107 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1108 || TREE_CODE (inner) == SAVE_EXPR)
1109 inner = TREE_OPERAND (inner, 0);
1111 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1113 if (MEM_OFFSET (mem)
1114 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1115 offset = INTVAL (MEM_OFFSET (mem));
1117 if (offset >= 0 && len && host_integerp (len, 0))
1118 length = tree_low_cst (len, 0);
1120 while (TREE_CODE (inner) == COMPONENT_REF)
1122 tree field = TREE_OPERAND (inner, 1);
1123 gcc_assert (! DECL_BIT_FIELD (field));
1124 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1125 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1127 if (length >= 0
1128 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1129 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1131 HOST_WIDE_INT size
1132 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1133 /* If we can prove the memory starting at XEXP (mem, 0)
1134 and ending at XEXP (mem, 0) + LENGTH will fit into
1135 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1136 if (offset <= size
1137 && length <= size
1138 && offset + length <= size)
1139 break;
1142 if (offset >= 0
1143 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1144 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1145 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1146 / BITS_PER_UNIT;
1147 else
1149 offset = -1;
1150 length = -1;
1153 mem_expr = TREE_OPERAND (mem_expr, 0);
1154 inner = TREE_OPERAND (inner, 0);
1157 if (mem_expr == NULL)
1158 offset = -1;
1159 if (mem_expr != MEM_EXPR (mem))
1161 set_mem_expr (mem, mem_expr);
1162 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1165 set_mem_alias_set (mem, 0);
1166 set_mem_size (mem, NULL_RTX);
1169 return mem;
1172 /* Built-in functions to perform an untyped call and return. */
1174 /* For each register that may be used for calling a function, this
1175 gives a mode used to copy the register's value. VOIDmode indicates
1176 the register is not used for calling a function. If the machine
1177 has register windows, this gives only the outbound registers.
1178 INCOMING_REGNO gives the corresponding inbound register. */
1179 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1181 /* For each register that may be used for returning values, this gives
1182 a mode used to copy the register's value. VOIDmode indicates the
1183 register is not used for returning values. If the machine has
1184 register windows, this gives only the outbound registers.
1185 INCOMING_REGNO gives the corresponding inbound register. */
1186 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1188 /* For each register that may be used for calling a function, this
1189 gives the offset of that register into the block returned by
1190 __builtin_apply_args. 0 indicates that the register is not
1191 used for calling a function. */
1192 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1194 /* Return the size required for the block returned by __builtin_apply_args,
1195 and initialize apply_args_mode. */
1197 static int
1198 apply_args_size (void)
1200 static int size = -1;
1201 int align;
1202 unsigned int regno;
1203 enum machine_mode mode;
1205 /* The values computed by this function never change. */
1206 if (size < 0)
1208 /* The first value is the incoming arg-pointer. */
1209 size = GET_MODE_SIZE (Pmode);
1211 /* The second value is the structure value address unless this is
1212 passed as an "invisible" first argument. */
1213 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1214 size += GET_MODE_SIZE (Pmode);
1216 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1217 if (FUNCTION_ARG_REGNO_P (regno))
1219 mode = reg_raw_mode[regno];
1221 gcc_assert (mode != VOIDmode);
1223 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1224 if (size % align != 0)
1225 size = CEIL (size, align) * align;
1226 apply_args_reg_offset[regno] = size;
1227 size += GET_MODE_SIZE (mode);
1228 apply_args_mode[regno] = mode;
1230 else
1232 apply_args_mode[regno] = VOIDmode;
1233 apply_args_reg_offset[regno] = 0;
1236 return size;
1239 /* Return the size required for the block returned by __builtin_apply,
1240 and initialize apply_result_mode. */
1242 static int
1243 apply_result_size (void)
1245 static int size = -1;
1246 int align, regno;
1247 enum machine_mode mode;
1249 /* The values computed by this function never change. */
1250 if (size < 0)
1252 size = 0;
1254 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1255 if (FUNCTION_VALUE_REGNO_P (regno))
1257 mode = reg_raw_mode[regno];
1259 gcc_assert (mode != VOIDmode);
1261 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1262 if (size % align != 0)
1263 size = CEIL (size, align) * align;
1264 size += GET_MODE_SIZE (mode);
1265 apply_result_mode[regno] = mode;
1267 else
1268 apply_result_mode[regno] = VOIDmode;
1270 /* Allow targets that use untyped_call and untyped_return to override
1271 the size so that machine-specific information can be stored here. */
1272 #ifdef APPLY_RESULT_SIZE
1273 size = APPLY_RESULT_SIZE;
1274 #endif
1276 return size;
1279 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1280 /* Create a vector describing the result block RESULT. If SAVEP is true,
1281 the result block is used to save the values; otherwise it is used to
1282 restore the values. */
1284 static rtx
1285 result_vector (int savep, rtx result)
1287 int regno, size, align, nelts;
1288 enum machine_mode mode;
1289 rtx reg, mem;
1290 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1292 size = nelts = 0;
1293 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1294 if ((mode = apply_result_mode[regno]) != VOIDmode)
1296 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1297 if (size % align != 0)
1298 size = CEIL (size, align) * align;
1299 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1300 mem = adjust_address (result, mode, size);
1301 savevec[nelts++] = (savep
1302 ? gen_rtx_SET (VOIDmode, mem, reg)
1303 : gen_rtx_SET (VOIDmode, reg, mem));
1304 size += GET_MODE_SIZE (mode);
1306 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1308 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1310 /* Save the state required to perform an untyped call with the same
1311 arguments as were passed to the current function. */
1313 static rtx
1314 expand_builtin_apply_args_1 (void)
1316 rtx registers, tem;
1317 int size, align, regno;
1318 enum machine_mode mode;
1319 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1321 /* Create a block where the arg-pointer, structure value address,
1322 and argument registers can be saved. */
1323 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1325 /* Walk past the arg-pointer and structure value address. */
1326 size = GET_MODE_SIZE (Pmode);
1327 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1328 size += GET_MODE_SIZE (Pmode);
1330 /* Save each register used in calling a function to the block. */
1331 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1332 if ((mode = apply_args_mode[regno]) != VOIDmode)
1334 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1335 if (size % align != 0)
1336 size = CEIL (size, align) * align;
1338 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1340 emit_move_insn (adjust_address (registers, mode, size), tem);
1341 size += GET_MODE_SIZE (mode);
1344 /* Save the arg pointer to the block. */
1345 tem = copy_to_reg (virtual_incoming_args_rtx);
1346 #ifdef STACK_GROWS_DOWNWARD
1347 /* We need the pointer as the caller actually passed them to us, not
1348 as we might have pretended they were passed. Make sure it's a valid
1349 operand, as emit_move_insn isn't expected to handle a PLUS. */
 1350 tem
 1351 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1352 NULL_RTX);
1353 #endif
1354 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1356 size = GET_MODE_SIZE (Pmode);
1358 /* Save the structure value address unless this is passed as an
1359 "invisible" first argument. */
1360 if (struct_incoming_value)
1362 emit_move_insn (adjust_address (registers, Pmode, size),
1363 copy_to_reg (struct_incoming_value));
1364 size += GET_MODE_SIZE (Pmode);
1367 /* Return the address of the block. */
1368 return copy_addr_to_reg (XEXP (registers, 0));
1371 /* __builtin_apply_args returns block of memory allocated on
1372 the stack into which is stored the arg pointer, structure
1373 value address, static chain, and all the registers that might
1374 possibly be used in performing a function call. The code is
1375 moved to the start of the function so the incoming values are
1376 saved. */
1378 static rtx
1379 expand_builtin_apply_args (void)
1381 /* Don't do __builtin_apply_args more than once in a function.
1382 Save the result of the first call and reuse it. */
1383 if (apply_args_value != 0)
1384 return apply_args_value;
1386 /* When this function is called, it means that registers must be
1387 saved on entry to this function. So we migrate the
1388 call to the first insn of this function. */
1389 rtx temp;
1390 rtx seq;
1392 start_sequence ();
1393 temp = expand_builtin_apply_args_1 ();
1394 seq = get_insns ();
1395 end_sequence ();
1397 apply_args_value = temp;
1399 /* Put the insns after the NOTE that starts the function.
1400 If this is inside a start_sequence, make the outer-level insn
1401 chain current, so the code is placed at the start of the
1402 function. */
1403 push_topmost_sequence ();
1404 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1405 pop_topmost_sequence ();
1406 return temp;
1410 /* Perform an untyped call and save the state required to perform an
1411 untyped return of whatever value was returned by the given function. */
1413 static rtx
1414 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1416 int size, align, regno;
1417 enum machine_mode mode;
1418 rtx incoming_args, result, reg, dest, src, call_insn;
1419 rtx old_stack_level = 0;
1420 rtx call_fusage = 0;
1421 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1423 arguments = convert_memory_address (Pmode, arguments);
1425 /* Create a block where the return registers can be saved. */
1426 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1428 /* Fetch the arg pointer from the ARGUMENTS block. */
1429 incoming_args = gen_reg_rtx (Pmode);
1430 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1431 #ifndef STACK_GROWS_DOWNWARD
1432 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1433 incoming_args, 0, OPTAB_LIB_WIDEN);
1434 #endif
1436 /* Push a new argument block and copy the arguments. Do not allow
1437 the (potential) memcpy call below to interfere with our stack
1438 manipulations. */
1439 do_pending_stack_adjust ();
1440 NO_DEFER_POP;
1442 /* Save the stack with nonlocal if available. */
1443 #ifdef HAVE_save_stack_nonlocal
1444 if (HAVE_save_stack_nonlocal)
1445 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1446 else
1447 #endif
1448 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1450 /* Allocate a block of memory onto the stack and copy the memory
1451 arguments to the outgoing arguments address. */
1452 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1453 dest = virtual_outgoing_args_rtx;
1454 #ifndef STACK_GROWS_DOWNWARD
1455 if (GET_CODE (argsize) == CONST_INT)
1456 dest = plus_constant (dest, -INTVAL (argsize));
1457 else
1458 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1459 #endif
1460 dest = gen_rtx_MEM (BLKmode, dest);
1461 set_mem_align (dest, PARM_BOUNDARY);
1462 src = gen_rtx_MEM (BLKmode, incoming_args);
1463 set_mem_align (src, PARM_BOUNDARY);
1464 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1466 /* Refer to the argument block. */
1467 apply_args_size ();
1468 arguments = gen_rtx_MEM (BLKmode, arguments);
1469 set_mem_align (arguments, PARM_BOUNDARY);
1471 /* Walk past the arg-pointer and structure value address. */
1472 size = GET_MODE_SIZE (Pmode);
1473 if (struct_value)
1474 size += GET_MODE_SIZE (Pmode);
1476 /* Restore each of the registers previously saved. Make USE insns
1477 for each of these registers for use in making the call. */
1478 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1479 if ((mode = apply_args_mode[regno]) != VOIDmode)
1481 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1482 if (size % align != 0)
1483 size = CEIL (size, align) * align;
1484 reg = gen_rtx_REG (mode, regno);
1485 emit_move_insn (reg, adjust_address (arguments, mode, size));
1486 use_reg (&call_fusage, reg);
1487 size += GET_MODE_SIZE (mode);
1490 /* Restore the structure value address unless this is passed as an
1491 "invisible" first argument. */
1492 size = GET_MODE_SIZE (Pmode);
1493 if (struct_value)
1495 rtx value = gen_reg_rtx (Pmode);
1496 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1497 emit_move_insn (struct_value, value);
1498 if (REG_P (struct_value))
1499 use_reg (&call_fusage, struct_value);
1500 size += GET_MODE_SIZE (Pmode);
1503 /* All arguments and registers used for the call are set up by now! */
1504 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1506 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1507 and we don't want to load it into a register as an optimization,
1508 because prepare_call_address already did it if it should be done. */
1509 if (GET_CODE (function) != SYMBOL_REF)
1510 function = memory_address (FUNCTION_MODE, function);
1512 /* Generate the actual call instruction and save the return value. */
1513 #ifdef HAVE_untyped_call
1514 if (HAVE_untyped_call)
1515 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1516 result, result_vector (1, result)));
1517 else
1518 #endif
1519 #ifdef HAVE_call_value
1520 if (HAVE_call_value)
1522 rtx valreg = 0;
1524 /* Locate the unique return register. It is not possible to
1525 express a call that sets more than one return register using
1526 call_value; use untyped_call for that. In fact, untyped_call
1527 only needs to save the return registers in the given block. */
1528 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1529 if ((mode = apply_result_mode[regno]) != VOIDmode)
1531 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1533 valreg = gen_rtx_REG (mode, regno);
1536 emit_call_insn (GEN_CALL_VALUE (valreg,
1537 gen_rtx_MEM (FUNCTION_MODE, function),
1538 const0_rtx, NULL_RTX, const0_rtx));
1540 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1542 else
1543 #endif
1544 gcc_unreachable ();
1546 /* Find the CALL insn we just emitted, and attach the register usage
1547 information. */
1548 call_insn = last_call_insn ();
1549 add_function_usage_to (call_insn, call_fusage);
1551 /* Restore the stack. */
1552 #ifdef HAVE_save_stack_nonlocal
1553 if (HAVE_save_stack_nonlocal)
1554 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1555 else
1556 #endif
1557 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1559 OK_DEFER_POP;
1561 /* Return the address of the result block. */
1562 result = copy_addr_to_reg (XEXP (result, 0));
1563 return convert_memory_address (ptr_mode, result);
1566 /* Perform an untyped return. */
1568 static void
1569 expand_builtin_return (rtx result)
1571 int size, align, regno;
1572 enum machine_mode mode;
1573 rtx reg;
1574 rtx call_fusage = 0;
1576 result = convert_memory_address (Pmode, result);
1578 apply_result_size ();
1579 result = gen_rtx_MEM (BLKmode, result);
1581 #ifdef HAVE_untyped_return
1582 if (HAVE_untyped_return)
1584 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1585 emit_barrier ();
1586 return;
1588 #endif
1590 /* Restore the return value and note that each value is used. */
1591 size = 0;
1592 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1593 if ((mode = apply_result_mode[regno]) != VOIDmode)
1595 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1596 if (size % align != 0)
1597 size = CEIL (size, align) * align;
1598 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1599 emit_move_insn (reg, adjust_address (result, mode, size));
1601 push_to_sequence (call_fusage);
1602 emit_insn (gen_rtx_USE (VOIDmode, reg));
1603 call_fusage = get_insns ();
1604 end_sequence ();
1605 size += GET_MODE_SIZE (mode);
1608 /* Put the USE insns before the return. */
1609 emit_insn (call_fusage);
1611 /* Return whatever values was restored by jumping directly to the end
1612 of the function. */
1613 expand_naked_return ();
1616 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1618 static enum type_class
1619 type_to_class (tree type)
1621 switch (TREE_CODE (type))
1623 case VOID_TYPE: return void_type_class;
1624 case INTEGER_TYPE: return integer_type_class;
1625 case ENUMERAL_TYPE: return enumeral_type_class;
1626 case BOOLEAN_TYPE: return boolean_type_class;
1627 case POINTER_TYPE: return pointer_type_class;
1628 case REFERENCE_TYPE: return reference_type_class;
1629 case OFFSET_TYPE: return offset_type_class;
1630 case REAL_TYPE: return real_type_class;
1631 case COMPLEX_TYPE: return complex_type_class;
1632 case FUNCTION_TYPE: return function_type_class;
1633 case METHOD_TYPE: return method_type_class;
1634 case RECORD_TYPE: return record_type_class;
1635 case UNION_TYPE:
1636 case QUAL_UNION_TYPE: return union_type_class;
1637 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1638 ? string_type_class : array_type_class);
1639 case LANG_TYPE: return lang_type_class;
1640 default: return no_type_class;
1644 /* Expand a call EXP to __builtin_classify_type. */
1646 static rtx
1647 expand_builtin_classify_type (tree exp)
1649 if (call_expr_nargs (exp))
1650 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1651 return GEN_INT (no_type_class);
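/* As a concrete illustration: a call such as __builtin_classify_type (1.5)
has a REAL_TYPE argument, so type_to_class yields real_type_class and the
whole call expands to that constant; a call with no arguments expands to
no_type_class. */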
1654 /* This helper macro, meant to be used in mathfn_built_in below,
1655 determines which among a set of three builtin math functions is
1656 appropriate for a given type mode. The `F' and `L' cases are
1657 automatically generated from the `double' case. */
1658 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1659 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1660 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1661 fcodel = BUILT_IN_MATHFN##L ; break;
1662 /* Similar to above, but appends _R after any F/L suffix. */
1663 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1664 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1665 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1666 fcodel = BUILT_IN_MATHFN##L_R ; break;
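/* For instance, CASE_MATHFN (BUILT_IN_SIN) expands to
case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
fcodel = BUILT_IN_SINL; break;
so one table entry covers the double, float and long double builtins. */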
1668 /* Return the mathematical function equivalent to FN but operating directly
1669 on TYPE, if available. If we can't do the conversion, return NULL_TREE. */
1670 tree
1671 mathfn_built_in (tree type, enum built_in_function fn)
1673 enum built_in_function fcode, fcodef, fcodel;
1675 switch (fn)
1677 CASE_MATHFN (BUILT_IN_ACOS)
1678 CASE_MATHFN (BUILT_IN_ACOSH)
1679 CASE_MATHFN (BUILT_IN_ASIN)
1680 CASE_MATHFN (BUILT_IN_ASINH)
1681 CASE_MATHFN (BUILT_IN_ATAN)
1682 CASE_MATHFN (BUILT_IN_ATAN2)
1683 CASE_MATHFN (BUILT_IN_ATANH)
1684 CASE_MATHFN (BUILT_IN_CBRT)
1685 CASE_MATHFN (BUILT_IN_CEIL)
1686 CASE_MATHFN (BUILT_IN_CEXPI)
1687 CASE_MATHFN (BUILT_IN_COPYSIGN)
1688 CASE_MATHFN (BUILT_IN_COS)
1689 CASE_MATHFN (BUILT_IN_COSH)
1690 CASE_MATHFN (BUILT_IN_DREM)
1691 CASE_MATHFN (BUILT_IN_ERF)
1692 CASE_MATHFN (BUILT_IN_ERFC)
1693 CASE_MATHFN (BUILT_IN_EXP)
1694 CASE_MATHFN (BUILT_IN_EXP10)
1695 CASE_MATHFN (BUILT_IN_EXP2)
1696 CASE_MATHFN (BUILT_IN_EXPM1)
1697 CASE_MATHFN (BUILT_IN_FABS)
1698 CASE_MATHFN (BUILT_IN_FDIM)
1699 CASE_MATHFN (BUILT_IN_FLOOR)
1700 CASE_MATHFN (BUILT_IN_FMA)
1701 CASE_MATHFN (BUILT_IN_FMAX)
1702 CASE_MATHFN (BUILT_IN_FMIN)
1703 CASE_MATHFN (BUILT_IN_FMOD)
1704 CASE_MATHFN (BUILT_IN_FREXP)
1705 CASE_MATHFN (BUILT_IN_GAMMA)
1706 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1707 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1708 CASE_MATHFN (BUILT_IN_HYPOT)
1709 CASE_MATHFN (BUILT_IN_ILOGB)
1710 CASE_MATHFN (BUILT_IN_INF)
1711 CASE_MATHFN (BUILT_IN_ISINF)
1712 CASE_MATHFN (BUILT_IN_J0)
1713 CASE_MATHFN (BUILT_IN_J1)
1714 CASE_MATHFN (BUILT_IN_JN)
1715 CASE_MATHFN (BUILT_IN_LCEIL)
1716 CASE_MATHFN (BUILT_IN_LDEXP)
1717 CASE_MATHFN (BUILT_IN_LFLOOR)
1718 CASE_MATHFN (BUILT_IN_LGAMMA)
1719 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1720 CASE_MATHFN (BUILT_IN_LLCEIL)
1721 CASE_MATHFN (BUILT_IN_LLFLOOR)
1722 CASE_MATHFN (BUILT_IN_LLRINT)
1723 CASE_MATHFN (BUILT_IN_LLROUND)
1724 CASE_MATHFN (BUILT_IN_LOG)
1725 CASE_MATHFN (BUILT_IN_LOG10)
1726 CASE_MATHFN (BUILT_IN_LOG1P)
1727 CASE_MATHFN (BUILT_IN_LOG2)
1728 CASE_MATHFN (BUILT_IN_LOGB)
1729 CASE_MATHFN (BUILT_IN_LRINT)
1730 CASE_MATHFN (BUILT_IN_LROUND)
1731 CASE_MATHFN (BUILT_IN_MODF)
1732 CASE_MATHFN (BUILT_IN_NAN)
1733 CASE_MATHFN (BUILT_IN_NANS)
1734 CASE_MATHFN (BUILT_IN_NEARBYINT)
1735 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1736 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1737 CASE_MATHFN (BUILT_IN_POW)
1738 CASE_MATHFN (BUILT_IN_POWI)
1739 CASE_MATHFN (BUILT_IN_POW10)
1740 CASE_MATHFN (BUILT_IN_REMAINDER)
1741 CASE_MATHFN (BUILT_IN_REMQUO)
1742 CASE_MATHFN (BUILT_IN_RINT)
1743 CASE_MATHFN (BUILT_IN_ROUND)
1744 CASE_MATHFN (BUILT_IN_SCALB)
1745 CASE_MATHFN (BUILT_IN_SCALBLN)
1746 CASE_MATHFN (BUILT_IN_SCALBN)
1747 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1748 CASE_MATHFN (BUILT_IN_SIN)
1749 CASE_MATHFN (BUILT_IN_SINCOS)
1750 CASE_MATHFN (BUILT_IN_SINH)
1751 CASE_MATHFN (BUILT_IN_SQRT)
1752 CASE_MATHFN (BUILT_IN_TAN)
1753 CASE_MATHFN (BUILT_IN_TANH)
1754 CASE_MATHFN (BUILT_IN_TGAMMA)
1755 CASE_MATHFN (BUILT_IN_TRUNC)
1756 CASE_MATHFN (BUILT_IN_Y0)
1757 CASE_MATHFN (BUILT_IN_Y1)
1758 CASE_MATHFN (BUILT_IN_YN)
1760 default:
1761 return NULL_TREE;
1764 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1765 return implicit_built_in_decls[fcode];
1766 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1767 return implicit_built_in_decls[fcodef];
1768 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1769 return implicit_built_in_decls[fcodel];
1770 else
1771 return NULL_TREE;
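/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) selects
fcodef = BUILT_IN_SINF and returns the implicit declaration of sinf, or
NULL_TREE if no such implicit declaration is available. */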
1774 /* If errno must be maintained, expand the RTL to check if the result,
1775 TARGET, of a built-in function call, EXP, is NaN, and if so set
1776 errno to EDOM. */
1778 static void
1779 expand_errno_check (tree exp, rtx target)
1781 rtx lab = gen_label_rtx ();
1783 /* Test the result; if it is NaN, set errno=EDOM because
1784 the argument was not in the domain. */
1785 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1786 0, lab);
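/* TARGET compares equal to itself for every value except a NaN, so the
branch to LAB is taken exactly when the result is not a NaN and errno
does not need to be touched. */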
1788 #ifdef TARGET_EDOM
1789 /* If this built-in doesn't throw an exception, set errno directly. */
1790 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1792 #ifdef GEN_ERRNO_RTX
1793 rtx errno_rtx = GEN_ERRNO_RTX;
1794 #else
1795 rtx errno_rtx
1796 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1797 #endif
1798 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1799 emit_label (lab);
1800 return;
1802 #endif
1804 /* Make sure the library call isn't expanded as a tail call. */
1805 CALL_EXPR_TAILCALL (exp) = 0;
1807 /* We can't set errno=EDOM directly; let the library call do it.
1808 Pop the arguments right away in case the call gets deleted. */
1809 NO_DEFER_POP;
1810 expand_call (exp, target, 0);
1811 OK_DEFER_POP;
1812 emit_label (lab);
1815 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1816 Return NULL_RTX if a normal call should be emitted rather than expanding
1817 the function in-line. EXP is the expression that is a call to the builtin
1818 function; if convenient, the result should be placed in TARGET.
1819 SUBTARGET may be used as the target for computing one of EXP's operands. */
1821 static rtx
1822 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1824 optab builtin_optab;
1825 rtx op0, insns, before_call;
1826 tree fndecl = get_callee_fndecl (exp);
1827 enum machine_mode mode;
1828 bool errno_set = false;
1829 tree arg;
1831 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1832 return NULL_RTX;
1834 arg = CALL_EXPR_ARG (exp, 0);
1836 switch (DECL_FUNCTION_CODE (fndecl))
1838 CASE_FLT_FN (BUILT_IN_SQRT):
1839 errno_set = ! tree_expr_nonnegative_p (arg);
1840 builtin_optab = sqrt_optab;
1841 break;
1842 CASE_FLT_FN (BUILT_IN_EXP):
1843 errno_set = true; builtin_optab = exp_optab; break;
1844 CASE_FLT_FN (BUILT_IN_EXP10):
1845 CASE_FLT_FN (BUILT_IN_POW10):
1846 errno_set = true; builtin_optab = exp10_optab; break;
1847 CASE_FLT_FN (BUILT_IN_EXP2):
1848 errno_set = true; builtin_optab = exp2_optab; break;
1849 CASE_FLT_FN (BUILT_IN_EXPM1):
1850 errno_set = true; builtin_optab = expm1_optab; break;
1851 CASE_FLT_FN (BUILT_IN_LOGB):
1852 errno_set = true; builtin_optab = logb_optab; break;
1853 CASE_FLT_FN (BUILT_IN_LOG):
1854 errno_set = true; builtin_optab = log_optab; break;
1855 CASE_FLT_FN (BUILT_IN_LOG10):
1856 errno_set = true; builtin_optab = log10_optab; break;
1857 CASE_FLT_FN (BUILT_IN_LOG2):
1858 errno_set = true; builtin_optab = log2_optab; break;
1859 CASE_FLT_FN (BUILT_IN_LOG1P):
1860 errno_set = true; builtin_optab = log1p_optab; break;
1861 CASE_FLT_FN (BUILT_IN_ASIN):
1862 builtin_optab = asin_optab; break;
1863 CASE_FLT_FN (BUILT_IN_ACOS):
1864 builtin_optab = acos_optab; break;
1865 CASE_FLT_FN (BUILT_IN_TAN):
1866 builtin_optab = tan_optab; break;
1867 CASE_FLT_FN (BUILT_IN_ATAN):
1868 builtin_optab = atan_optab; break;
1869 CASE_FLT_FN (BUILT_IN_FLOOR):
1870 builtin_optab = floor_optab; break;
1871 CASE_FLT_FN (BUILT_IN_CEIL):
1872 builtin_optab = ceil_optab; break;
1873 CASE_FLT_FN (BUILT_IN_TRUNC):
1874 builtin_optab = btrunc_optab; break;
1875 CASE_FLT_FN (BUILT_IN_ROUND):
1876 builtin_optab = round_optab; break;
1877 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1878 builtin_optab = nearbyint_optab;
1879 if (flag_trapping_math)
1880 break;
1881 /* Else fall through and expand as rint. */
1882 CASE_FLT_FN (BUILT_IN_RINT):
1883 builtin_optab = rint_optab; break;
1884 default:
1885 gcc_unreachable ();
1888 /* Make a suitable register to place result in. */
1889 mode = TYPE_MODE (TREE_TYPE (exp));
1891 if (! flag_errno_math || ! HONOR_NANS (mode))
1892 errno_set = false;
1894 /* Before working hard, check whether the instruction is available. */
1895 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1897 target = gen_reg_rtx (mode);
1899 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1900 need to expand the argument again. This way, we will not perform
1901 side-effects more than once. */
1902 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1904 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1906 start_sequence ();
1908 /* Compute into TARGET.
1909 Set TARGET to wherever the result comes back. */
1910 target = expand_unop (mode, builtin_optab, op0, target, 0);
1912 if (target != 0)
1914 if (errno_set)
1915 expand_errno_check (exp, target);
1917 /* Output the entire sequence. */
1918 insns = get_insns ();
1919 end_sequence ();
1920 emit_insn (insns);
1921 return target;
1924 /* If we were unable to expand via the builtin, stop the sequence
1925 (without outputting the insns) and call the library function
1926 with the stabilized argument list. */
1927 end_sequence ();
1930 before_call = get_last_insn ();
1932 target = expand_call (exp, target, target == const0_rtx);
1934 /* If this is a sqrt operation and we don't care about errno, try to
1935 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1936 This allows the semantics of the libcall to be visible to the RTL
1937 optimizers. */
1938 if (builtin_optab == sqrt_optab && !errno_set)
1940 /* Search backwards through the insns emitted by expand_call looking
1941 for the instruction with the REG_RETVAL note. */
1942 rtx last = get_last_insn ();
1943 while (last != before_call)
1945 if (find_reg_note (last, REG_RETVAL, NULL))
1947 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1948 /* Check that the REG_EQUAL note is an EXPR_LIST with
1949 two elements, i.e. symbol_ref(sqrt) and the operand. */
1950 if (note
1951 && GET_CODE (note) == EXPR_LIST
1952 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1953 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1954 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1956 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1957 /* Check that the operand is a register with the expected mode. */
1958 if (operand
1959 && REG_P (operand)
1960 && GET_MODE (operand) == mode)
1962 /* Replace the REG_EQUAL note with a SQRT rtx. */
1963 rtx equiv = gen_rtx_SQRT (mode, operand);
1964 set_unique_reg_note (last, REG_EQUAL, equiv);
1967 break;
1969 last = PREV_INSN (last);
1973 return target;
1976 /* Expand a call to the builtin binary math functions (pow and atan2).
1977 Return NULL_RTX if a normal call should be emitted rather than expanding the
1978 function in-line. EXP is the expression that is a call to the builtin
1979 function; if convenient, the result should be placed in TARGET.
1980 SUBTARGET may be used as the target for computing one of EXP's
1981 operands. */
1983 static rtx
1984 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1986 optab builtin_optab;
1987 rtx op0, op1, insns;
1988 int op1_type = REAL_TYPE;
1989 tree fndecl = get_callee_fndecl (exp);
1990 tree arg0, arg1;
1991 enum machine_mode mode;
1992 bool errno_set = true;
1994 switch (DECL_FUNCTION_CODE (fndecl))
1996 CASE_FLT_FN (BUILT_IN_SCALBN):
1997 CASE_FLT_FN (BUILT_IN_SCALBLN):
1998 CASE_FLT_FN (BUILT_IN_LDEXP):
1999 op1_type = INTEGER_TYPE;
2000 default:
2001 break;
2004 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2005 return NULL_RTX;
2007 arg0 = CALL_EXPR_ARG (exp, 0);
2008 arg1 = CALL_EXPR_ARG (exp, 1);
2010 switch (DECL_FUNCTION_CODE (fndecl))
2012 CASE_FLT_FN (BUILT_IN_POW):
2013 builtin_optab = pow_optab; break;
2014 CASE_FLT_FN (BUILT_IN_ATAN2):
2015 builtin_optab = atan2_optab; break;
2016 CASE_FLT_FN (BUILT_IN_SCALB):
2017 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2018 return 0;
2019 builtin_optab = scalb_optab; break;
2020 CASE_FLT_FN (BUILT_IN_SCALBN):
2021 CASE_FLT_FN (BUILT_IN_SCALBLN):
2022 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2023 return 0;
2024 /* Fall through... */
2025 CASE_FLT_FN (BUILT_IN_LDEXP):
2026 builtin_optab = ldexp_optab; break;
2027 CASE_FLT_FN (BUILT_IN_FMOD):
2028 builtin_optab = fmod_optab; break;
2029 CASE_FLT_FN (BUILT_IN_REMAINDER):
2030 CASE_FLT_FN (BUILT_IN_DREM):
2031 builtin_optab = remainder_optab; break;
2032 default:
2033 gcc_unreachable ();
2036 /* Make a suitable register to place result in. */
2037 mode = TYPE_MODE (TREE_TYPE (exp));
2039 /* Before working hard, check whether the instruction is available. */
2040 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2041 return NULL_RTX;
2043 target = gen_reg_rtx (mode);
2045 if (! flag_errno_math || ! HONOR_NANS (mode))
2046 errno_set = false;
2048 /* Always stabilize the argument list. */
2049 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2050 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2052 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2053 op1 = expand_normal (arg1);
2055 start_sequence ();
2057 /* Compute into TARGET.
2058 Set TARGET to wherever the result comes back. */
2059 target = expand_binop (mode, builtin_optab, op0, op1,
2060 target, 0, OPTAB_DIRECT);
2062 /* If we were unable to expand via the builtin, stop the sequence
2063 (without outputting the insns) and call the library function
2064 with the stabilized argument list. */
2065 if (target == 0)
2067 end_sequence ();
2068 return expand_call (exp, target, target == const0_rtx);
2071 if (errno_set)
2072 expand_errno_check (exp, target);
2074 /* Output the entire sequence. */
2075 insns = get_insns ();
2076 end_sequence ();
2077 emit_insn (insns);
2079 return target;
2082 /* Expand a call to the builtin sin and cos math functions.
2083 Return NULL_RTX if a normal call should be emitted rather than expanding the
2084 function in-line. EXP is the expression that is a call to the builtin
2085 function; if convenient, the result should be placed in TARGET.
2086 SUBTARGET may be used as the target for computing one of EXP's
2087 operands. */
2089 static rtx
2090 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2092 optab builtin_optab;
2093 rtx op0, insns;
2094 tree fndecl = get_callee_fndecl (exp);
2095 enum machine_mode mode;
2096 tree arg;
2098 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2099 return NULL_RTX;
2101 arg = CALL_EXPR_ARG (exp, 0);
2103 switch (DECL_FUNCTION_CODE (fndecl))
2105 CASE_FLT_FN (BUILT_IN_SIN):
2106 CASE_FLT_FN (BUILT_IN_COS):
2107 builtin_optab = sincos_optab; break;
2108 default:
2109 gcc_unreachable ();
2112 /* Make a suitable register to place result in. */
2113 mode = TYPE_MODE (TREE_TYPE (exp));
2115 /* Check whether the sincos insn is available; otherwise fall back
2116 to the sin or cos insn. */
2117 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2118 switch (DECL_FUNCTION_CODE (fndecl))
2120 CASE_FLT_FN (BUILT_IN_SIN):
2121 builtin_optab = sin_optab; break;
2122 CASE_FLT_FN (BUILT_IN_COS):
2123 builtin_optab = cos_optab; break;
2124 default:
2125 gcc_unreachable ();
2128 /* Before working hard, check whether the instruction is available. */
2129 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2131 target = gen_reg_rtx (mode);
2133 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2134 need to expand the argument again. This way, we will not perform
2135 side-effects more than once. */
2136 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2138 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2140 start_sequence ();
2142 /* Compute into TARGET.
2143 Set TARGET to wherever the result comes back. */
2144 if (builtin_optab == sincos_optab)
2146 int result;
2148 switch (DECL_FUNCTION_CODE (fndecl))
2150 CASE_FLT_FN (BUILT_IN_SIN):
2151 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2152 break;
2153 CASE_FLT_FN (BUILT_IN_COS):
2154 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2155 break;
2156 default:
2157 gcc_unreachable ();
2159 gcc_assert (result);
2161 else
2163 target = expand_unop (mode, builtin_optab, op0, target, 0);
2166 if (target != 0)
2168 /* Output the entire sequence. */
2169 insns = get_insns ();
2170 end_sequence ();
2171 emit_insn (insns);
2172 return target;
2175 /* If we were unable to expand via the builtin, stop the sequence
2177 (without outputting the insns) and call the library function
2177 with the stabilized argument list. */
2178 end_sequence ();
2181 target = expand_call (exp, target, target == const0_rtx);
2183 return target;
2186 /* Expand a call to one of the builtin math functions that operate on
2187 a floating point argument and produce an integer result (ilogb, isinf,
2188 isnan, etc.).
2189 Return 0 if a normal call should be emitted rather than expanding the
2190 function in-line. EXP is the expression that is a call to the builtin
2191 function; if convenient, the result should be placed in TARGET.
2192 SUBTARGET may be used as the target for computing one of EXP's operands. */
2194 static rtx
2195 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2197 optab builtin_optab = 0;
2198 enum insn_code icode = CODE_FOR_nothing;
2199 rtx op0;
2200 tree fndecl = get_callee_fndecl (exp);
2201 enum machine_mode mode;
2202 bool errno_set = false;
2203 tree arg;
2205 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2206 return NULL_RTX;
2208 arg = CALL_EXPR_ARG (exp, 0);
2210 switch (DECL_FUNCTION_CODE (fndecl))
2212 CASE_FLT_FN (BUILT_IN_ILOGB):
2213 errno_set = true; builtin_optab = ilogb_optab; break;
2214 CASE_FLT_FN (BUILT_IN_ISINF):
2215 builtin_optab = isinf_optab; break;
2216 case BUILT_IN_ISNORMAL:
2217 case BUILT_IN_ISFINITE:
2218 CASE_FLT_FN (BUILT_IN_FINITE):
2219 /* These builtins have no optabs (yet). */
2220 break;
2221 default:
2222 gcc_unreachable ();
2225 /* There's no easy way to detect the case where we need to set EDOM. */
2226 if (flag_errno_math && errno_set)
2227 return NULL_RTX;
2229 /* Optab mode depends on the mode of the input argument. */
2230 mode = TYPE_MODE (TREE_TYPE (arg));
2232 if (builtin_optab)
2233 icode = optab_handler (builtin_optab, mode)->insn_code;
2235 /* Before working hard, check whether the instruction is available. */
2236 if (icode != CODE_FOR_nothing)
2238 /* Make a suitable register to place result in. */
2239 if (!target
2240 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2241 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2243 gcc_assert (insn_data[icode].operand[0].predicate
2244 (target, GET_MODE (target)));
2246 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2247 need to expand the argument again. This way, we will not perform
2248 side-effects more than once. */
2249 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2251 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2253 if (mode != GET_MODE (op0))
2254 op0 = convert_to_mode (mode, op0, 0);
2256 /* Compute into TARGET.
2257 Set TARGET to wherever the result comes back. */
2258 emit_unop_insn (icode, target, op0, UNKNOWN);
2259 return target;
2262 /* If there is no optab, try generic code. */
2263 switch (DECL_FUNCTION_CODE (fndecl))
2265 tree result;
2267 CASE_FLT_FN (BUILT_IN_ISINF):
2269 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2270 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2271 tree const type = TREE_TYPE (arg);
2272 REAL_VALUE_TYPE r;
2273 char buf[128];
2275 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2276 real_from_string (&r, buf);
2277 result = build_call_expr (isgr_fn, 2,
2278 fold_build1 (ABS_EXPR, type, arg),
2279 build_real (type, r));
2280 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2282 CASE_FLT_FN (BUILT_IN_FINITE):
2283 case BUILT_IN_ISFINITE:
2285 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2286 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2287 tree const type = TREE_TYPE (arg);
2288 REAL_VALUE_TYPE r;
2289 char buf[128];
2291 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2292 real_from_string (&r, buf);
2293 result = build_call_expr (isle_fn, 2,
2294 fold_build1 (ABS_EXPR, type, arg),
2295 build_real (type, r));
2296 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2298 case BUILT_IN_ISNORMAL:
2300 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2301 islessequal(fabs(x),DBL_MAX). */
2302 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2303 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2304 tree const type = TREE_TYPE (arg);
2305 REAL_VALUE_TYPE rmax, rmin;
2306 char buf[128];
2308 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2309 real_from_string (&rmax, buf);
2310 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2311 real_from_string (&rmin, buf);
2312 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2313 result = build_call_expr (isle_fn, 2, arg,
2314 build_real (type, rmax));
2315 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2316 build_call_expr (isge_fn, 2, arg,
2317 build_real (type, rmin)));
2318 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2320 default:
2321 break;
2324 target = expand_call (exp, target, target == const0_rtx);
2326 return target;
2329 /* Expand a call to the builtin sincos math function.
2330 Return NULL_RTX if a normal call should be emitted rather than expanding the
2331 function in-line. EXP is the expression that is a call to the builtin
2332 function. */
2334 static rtx
2335 expand_builtin_sincos (tree exp)
2337 rtx op0, op1, op2, target1, target2;
2338 enum machine_mode mode;
2339 tree arg, sinp, cosp;
2340 int result;
2342 if (!validate_arglist (exp, REAL_TYPE,
2343 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2344 return NULL_RTX;
2346 arg = CALL_EXPR_ARG (exp, 0);
2347 sinp = CALL_EXPR_ARG (exp, 1);
2348 cosp = CALL_EXPR_ARG (exp, 2);
2350 /* Make a suitable register to place result in. */
2351 mode = TYPE_MODE (TREE_TYPE (arg));
2353 /* Check whether the sincos insn is available; otherwise emit the call. */
2354 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2355 return NULL_RTX;
2357 target1 = gen_reg_rtx (mode);
2358 target2 = gen_reg_rtx (mode);
2360 op0 = expand_normal (arg);
2361 op1 = expand_normal (build_fold_indirect_ref (sinp));
2362 op2 = expand_normal (build_fold_indirect_ref (cosp));
2364 /* Compute into TARGET1 and TARGET2. The sincos results come back
2365 in these two registers. */
2366 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2367 gcc_assert (result);
2369 /* Move target1 and target2 to the memory locations indicated
2370 by op1 and op2. */
2371 emit_move_insn (op1, target1);
2372 emit_move_insn (op2, target2);
2374 return const0_rtx;
2377 /* Expand a call to the internal cexpi builtin to the sincos math function.
2378 EXP is the expression that is a call to the builtin function; if convenient,
2379 the result should be placed in TARGET. SUBTARGET may be used as the target
2380 for computing one of EXP's operands. */
2382 static rtx
2383 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2385 tree fndecl = get_callee_fndecl (exp);
2386 tree arg, type;
2387 enum machine_mode mode;
2388 rtx op0, op1, op2;
2390 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2391 return NULL_RTX;
2393 arg = CALL_EXPR_ARG (exp, 0);
2394 type = TREE_TYPE (arg);
2395 mode = TYPE_MODE (TREE_TYPE (arg));
2397 /* Try expanding via a sincos optab, falling back to emitting a libcall
2398 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2399 is only generated from sincos or cexp, or when either of them is available. */
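/* In all three cases the value computed is cexpi (x) == cos (x) + i*sin (x);
only the mechanism (sincos insn, sincos libcall or cexp libcall) differs. */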
2400 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2402 op1 = gen_reg_rtx (mode);
2403 op2 = gen_reg_rtx (mode);
2405 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2407 /* Compute into op1 and op2. */
2408 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2410 else if (TARGET_HAS_SINCOS)
2412 tree call, fn = NULL_TREE;
2413 tree top1, top2;
2414 rtx op1a, op2a;
2416 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2417 fn = built_in_decls[BUILT_IN_SINCOSF];
2418 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2419 fn = built_in_decls[BUILT_IN_SINCOS];
2420 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2421 fn = built_in_decls[BUILT_IN_SINCOSL];
2422 else
2423 gcc_unreachable ();
2425 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2426 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2427 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2428 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2429 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2430 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2432 /* Make sure not to fold the sincos call again. */
2433 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2434 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2435 call, 3, arg, top1, top2));
2437 else
2439 tree call, fn = NULL_TREE, narg;
2440 tree ctype = build_complex_type (type);
2442 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2443 fn = built_in_decls[BUILT_IN_CEXPF];
2444 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2445 fn = built_in_decls[BUILT_IN_CEXP];
2446 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2447 fn = built_in_decls[BUILT_IN_CEXPL];
2448 else
2449 gcc_unreachable ();
2451 /* If we don't have a decl for cexp, create one. This is the
2452 friendliest fallback if the user calls __builtin_cexpi
2453 on a target without full C99 function support. */
2454 if (fn == NULL_TREE)
2456 tree fntype;
2457 const char *name = NULL;
2459 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2460 name = "cexpf";
2461 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2462 name = "cexp";
2463 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2464 name = "cexpl";
2466 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2467 fn = build_fn_decl (name, fntype);
2470 narg = fold_build2 (COMPLEX_EXPR, ctype,
2471 build_real (type, dconst0), arg);
2473 /* Make sure not to fold the cexp call again. */
2474 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2475 return expand_expr (build_call_nary (ctype, call, 1, narg),
2476 target, VOIDmode, EXPAND_NORMAL);
2479 /* Now build the proper return type. */
2480 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2481 make_tree (TREE_TYPE (arg), op2),
2482 make_tree (TREE_TYPE (arg), op1)),
2483 target, VOIDmode, EXPAND_NORMAL);
2486 /* Expand a call to one of the builtin rounding functions gcc defines
2487 as an extension (lfloor and lceil). As these are gcc extensions, we
2488 do not need to worry about setting errno to EDOM.
2489 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2490 EXP is the expression that is a call to the builtin function;
2491 if convenient, the result should be placed in TARGET. SUBTARGET may
2492 be used as the target for computing one of EXP's operands. */
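/* For example, when the lceil/lfloor optab is unavailable, a call such as
__builtin_lfloor (x) is lowered to a call to floor (x) followed by a
float-to-integer conversion via expand_fix, i.e. roughly (long) floor (x). */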
2494 static rtx
2495 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2497 convert_optab builtin_optab;
2498 rtx op0, insns, tmp;
2499 tree fndecl = get_callee_fndecl (exp);
2500 enum built_in_function fallback_fn;
2501 tree fallback_fndecl;
2502 enum machine_mode mode;
2503 tree arg;
2505 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2506 gcc_unreachable ();
2508 arg = CALL_EXPR_ARG (exp, 0);
2510 switch (DECL_FUNCTION_CODE (fndecl))
2512 CASE_FLT_FN (BUILT_IN_LCEIL):
2513 CASE_FLT_FN (BUILT_IN_LLCEIL):
2514 builtin_optab = lceil_optab;
2515 fallback_fn = BUILT_IN_CEIL;
2516 break;
2518 CASE_FLT_FN (BUILT_IN_LFLOOR):
2519 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2520 builtin_optab = lfloor_optab;
2521 fallback_fn = BUILT_IN_FLOOR;
2522 break;
2524 default:
2525 gcc_unreachable ();
2528 /* Make a suitable register to place result in. */
2529 mode = TYPE_MODE (TREE_TYPE (exp));
2531 target = gen_reg_rtx (mode);
2533 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2534 need to expand the argument again. This way, we will not perform
2535 side-effects more than once. */
2536 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2538 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2540 start_sequence ();
2542 /* Compute into TARGET. */
2543 if (expand_sfix_optab (target, op0, builtin_optab))
2545 /* Output the entire sequence. */
2546 insns = get_insns ();
2547 end_sequence ();
2548 emit_insn (insns);
2549 return target;
2552 /* If we were unable to expand via the builtin, stop the sequence
2553 (without outputting the insns). */
2554 end_sequence ();
2556 /* Fall back to floating point rounding optab. */
2557 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2559 /* For non-C99 targets we may end up without a fallback fndecl here
2560 if the user called __builtin_lfloor directly. In this case emit
2561 a call to the floor/ceil variants nevertheless. This should result
2562 in the best user experience for targets without full C99 support. */
2563 if (fallback_fndecl == NULL_TREE)
2565 tree fntype;
2566 const char *name = NULL;
2568 switch (DECL_FUNCTION_CODE (fndecl))
2570 case BUILT_IN_LCEIL:
2571 case BUILT_IN_LLCEIL:
2572 name = "ceil";
2573 break;
2574 case BUILT_IN_LCEILF:
2575 case BUILT_IN_LLCEILF:
2576 name = "ceilf";
2577 break;
2578 case BUILT_IN_LCEILL:
2579 case BUILT_IN_LLCEILL:
2580 name = "ceill";
2581 break;
2582 case BUILT_IN_LFLOOR:
2583 case BUILT_IN_LLFLOOR:
2584 name = "floor";
2585 break;
2586 case BUILT_IN_LFLOORF:
2587 case BUILT_IN_LLFLOORF:
2588 name = "floorf";
2589 break;
2590 case BUILT_IN_LFLOORL:
2591 case BUILT_IN_LLFLOORL:
2592 name = "floorl";
2593 break;
2594 default:
2595 gcc_unreachable ();
2598 fntype = build_function_type_list (TREE_TYPE (arg),
2599 TREE_TYPE (arg), NULL_TREE);
2600 fallback_fndecl = build_fn_decl (name, fntype);
2603 exp = build_call_expr (fallback_fndecl, 1, arg);
2605 tmp = expand_normal (exp);
2607 /* Truncate the result of floating point optab to integer
2608 via expand_fix (). */
2609 target = gen_reg_rtx (mode);
2610 expand_fix (target, tmp, 0);
2612 return target;
2615 /* Expand a call to one of the builtin math functions doing integer
2616 conversion (lrint, llrint, lround and llround).
2617 Return 0 if a normal call should be emitted rather than expanding the
2618 function in-line. EXP is the expression that is a call to the builtin
2619 function; if convenient, the result should be placed in TARGET.
2620 SUBTARGET may be used as the target for computing one of EXP's operands. */
2622 static rtx
2623 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2625 convert_optab builtin_optab;
2626 rtx op0, insns;
2627 tree fndecl = get_callee_fndecl (exp);
2628 tree arg;
2629 enum machine_mode mode;
2631 /* There's no easy way to detect the case where we need to set EDOM. */
2632 if (flag_errno_math)
2633 return NULL_RTX;
2635 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2636 gcc_unreachable ();
2638 arg = CALL_EXPR_ARG (exp, 0);
2640 switch (DECL_FUNCTION_CODE (fndecl))
2642 CASE_FLT_FN (BUILT_IN_LRINT):
2643 CASE_FLT_FN (BUILT_IN_LLRINT):
2644 builtin_optab = lrint_optab; break;
2645 CASE_FLT_FN (BUILT_IN_LROUND):
2646 CASE_FLT_FN (BUILT_IN_LLROUND):
2647 builtin_optab = lround_optab; break;
2648 default:
2649 gcc_unreachable ();
2652 /* Make a suitable register to place result in. */
2653 mode = TYPE_MODE (TREE_TYPE (exp));
2655 target = gen_reg_rtx (mode);
2657 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2658 need to expand the argument again. This way, we will not perform
2659 side-effects more than once. */
2660 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2662 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2664 start_sequence ();
2666 if (expand_sfix_optab (target, op0, builtin_optab))
2668 /* Output the entire sequence. */
2669 insns = get_insns ();
2670 end_sequence ();
2671 emit_insn (insns);
2672 return target;
2675 /* If we were unable to expand via the builtin, stop the sequence
2676 (without outputting the insns) and call the library function
2677 with the stabilized argument list. */
2678 end_sequence ();
2680 target = expand_call (exp, target, target == const0_rtx);
2682 return target;
2685 /* To evaluate powi(x,n), the floating point value x raised to the
2686 constant integer exponent n, we use a hybrid algorithm that
2687 combines the "window method" with look-up tables. For an
2688 introduction to exponentiation algorithms and "addition chains",
2689 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2690 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2691 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2692 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2694 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2695 multiplications to inline before calling the system library's pow
2696 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2697 so this default never requires calling pow, powf or powl. */
2699 #ifndef POWI_MAX_MULTS
2700 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2701 #endif
2703 /* The size of the "optimal power tree" lookup table. All
2704 exponents less than this value are simply looked up in the
2705 powi_table below. This threshold is also used to size the
2706 cache of pseudo registers that hold intermediate results. */
2707 #define POWI_TABLE_SIZE 256
2709 /* The size, in bits, of the window used in the "window method"
2710 exponentiation algorithm. This is equivalent to a radix of
2711 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2712 #define POWI_WINDOW_SIZE 3
2714 /* The following table is an efficient representation of an
2715 "optimal power tree". For each value, i, the corresponding
2716 value, j, in the table states that an optimal evaluation
2717 sequence for calculating pow(x,i) can be found by evaluating
2718 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2719 100 integers is given in Knuth's "Seminumerical algorithms". */
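/* Worked example using the table below: powi_table[15] is 9, so
x**15 = x**9 * x**6; powi_table[9] is 6 and powi_table[6] is 3, giving
x**9 = x**6 * x**3 and x**6 = x**3 * x**3; finally x**3 = x**2 * x and
x**2 = x * x. Because intermediate powers are cached, this costs five
multiplications, one fewer than plain binary exponentiation. */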
2721 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2723 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2724 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2725 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2726 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2727 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2728 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2729 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2730 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2731 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2732 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2733 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2734 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2735 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2736 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2737 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2738 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2739 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2740 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2741 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2742 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2743 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2744 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2745 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2746 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2747 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2748 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2749 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2750 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2751 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2752 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2753 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2754 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2758 /* Return the number of multiplications required to calculate
2759 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2760 subroutine of powi_cost. CACHE is an array indicating
2761 which exponents have already been calculated. */
2763 static int
2764 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2766 /* If we've already calculated this exponent, then this evaluation
2767 doesn't require any additional multiplications. */
2768 if (cache[n])
2769 return 0;
2771 cache[n] = true;
2772 return powi_lookup_cost (n - powi_table[n], cache)
2773 + powi_lookup_cost (powi_table[n], cache) + 1;
2776 /* Return the number of multiplications required to calculate
2777 powi(x,n) for an arbitrary x, given the exponent N. This
2778 function needs to be kept in sync with expand_powi below. */
2780 static int
2781 powi_cost (HOST_WIDE_INT n)
2783 bool cache[POWI_TABLE_SIZE];
2784 unsigned HOST_WIDE_INT digit;
2785 unsigned HOST_WIDE_INT val;
2786 int result;
2788 if (n == 0)
2789 return 0;
2791 /* Ignore the reciprocal when calculating the cost. */
2792 val = (n < 0) ? -n : n;
2794 /* Initialize the exponent cache. */
2795 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2796 cache[1] = true;
2798 result = 0;
2800 while (val >= POWI_TABLE_SIZE)
2802 if (val & 1)
2804 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2805 result += powi_lookup_cost (digit, cache)
2806 + POWI_WINDOW_SIZE + 1;
2807 val >>= POWI_WINDOW_SIZE;
2809 else
2811 val >>= 1;
2812 result++;
2816 return result + powi_lookup_cost (val, cache);
2819 /* Recursive subroutine of expand_powi. This function takes the array,
2820 CACHE, of already calculated exponents and an exponent N and returns
2821 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2823 static rtx
2824 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2826 unsigned HOST_WIDE_INT digit;
2827 rtx target, result;
2828 rtx op0, op1;
2830 if (n < POWI_TABLE_SIZE)
2832 if (cache[n])
2833 return cache[n];
2835 target = gen_reg_rtx (mode);
2836 cache[n] = target;
2838 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2839 op1 = expand_powi_1 (mode, powi_table[n], cache);
2841 else if (n & 1)
2843 target = gen_reg_rtx (mode);
2844 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2845 op0 = expand_powi_1 (mode, n - digit, cache);
2846 op1 = expand_powi_1 (mode, digit, cache);
2848 else
2850 target = gen_reg_rtx (mode);
2851 op0 = expand_powi_1 (mode, n >> 1, cache);
2852 op1 = op0;
2855 result = expand_mult (mode, op0, op1, target, 0);
2856 if (result != target)
2857 emit_move_insn (target, result);
2858 return target;
2861 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2862 floating point operand in mode MODE, and N is the exponent. This
2863 function needs to be kept in sync with powi_cost above. */
2865 static rtx
2866 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2868 unsigned HOST_WIDE_INT val;
2869 rtx cache[POWI_TABLE_SIZE];
2870 rtx result;
2872 if (n == 0)
2873 return CONST1_RTX (mode);
2875 val = (n < 0) ? -n : n;
2877 memset (cache, 0, sizeof (cache));
2878 cache[1] = x;
2880 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2882 /* If the original exponent was negative, reciprocate the result. */
2883 if (n < 0)
2884 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2885 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2887 return result;
2890 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2891 a normal call should be emitted rather than expanding the function
2892 in-line. EXP is the expression that is a call to the builtin
2893 function; if convenient, the result should be placed in TARGET. */
2895 static rtx
2896 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2898 tree arg0, arg1;
2899 tree fn, narg0;
2900 tree type = TREE_TYPE (exp);
2901 REAL_VALUE_TYPE cint, c, c2;
2902 HOST_WIDE_INT n;
2903 rtx op, op2;
2904 enum machine_mode mode = TYPE_MODE (type);
2906 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2907 return NULL_RTX;
2909 arg0 = CALL_EXPR_ARG (exp, 0);
2910 arg1 = CALL_EXPR_ARG (exp, 1);
2912 if (TREE_CODE (arg1) != REAL_CST
2913 || TREE_OVERFLOW (arg1))
2914 return expand_builtin_mathfn_2 (exp, target, subtarget);
2916 /* Handle constant exponents. */
2918 /* For integer valued exponents we can expand to an optimal multiplication
2919 sequence using expand_powi. */
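/* E.g. pow (x, 2.0) has the integer-valued exponent 2 and is expanded by
expand_powi into a single multiplication x * x instead of a libcall;
larger exponents are handled the same way when -funsafe-math-optimizations
is in effect and the multiplication count stays within POWI_MAX_MULTS. */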
2920 c = TREE_REAL_CST (arg1);
2921 n = real_to_integer (&c);
2922 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2923 if (real_identical (&c, &cint)
2924 && ((n >= -1 && n <= 2)
2925 || (flag_unsafe_math_optimizations
2926 && !optimize_size
2927 && powi_cost (n) <= POWI_MAX_MULTS)))
2929 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2930 if (n != 1)
2932 op = force_reg (mode, op);
2933 op = expand_powi (op, mode, n);
2935 return op;
2938 narg0 = builtin_save_expr (arg0);
2940 /* If the exponent is not integer valued, check if it is half of an integer.
2941 In this case we can expand to sqrt (x) * x**(n/2). */
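/* E.g. pow (x, 2.5) has 2*c == 5, so under -funsafe-math-optimizations it
is expanded as sqrt (x) * x**2. */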
2942 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2943 if (fn != NULL_TREE)
2945 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2946 n = real_to_integer (&c2);
2947 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2948 if (real_identical (&c2, &cint)
2949 && ((flag_unsafe_math_optimizations
2950 && !optimize_size
2951 && powi_cost (n/2) <= POWI_MAX_MULTS)
2952 || n == 1))
2954 tree call_expr = build_call_expr (fn, 1, narg0);
2955 /* Use expand_expr in case the newly built call expression
2956 was folded to a non-call. */
2957 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2958 if (n != 1)
2960 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2961 op2 = force_reg (mode, op2);
2962 op2 = expand_powi (op2, mode, abs (n / 2));
2963 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2964 0, OPTAB_LIB_WIDEN);
2965 /* If the original exponent was negative, reciprocate the
2966 result. */
2967 if (n < 0)
2968 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2969 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2971 return op;
2975 /* Check whether the exponent is a third of an integer. In this case
2976 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2977 different from pow (x, 1./3.) due to rounding and behavior
2978 with negative x, we need to constrain this transformation to
2979 unsafe math and positive x or finite math. */
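/* E.g. pow (x, 4./3.) has 3*c == 4, so (subject to the constraints above)
it is expanded as x * cbrt (x), i.e. x**(4/3) == x**1 * x**(1/3). */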
2980 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2981 if (fn != NULL_TREE
2982 && flag_unsafe_math_optimizations
2983 && (tree_expr_nonnegative_p (arg0)
2984 || !HONOR_NANS (mode)))
2986 REAL_VALUE_TYPE dconst3;
2987 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
2988 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2989 real_round (&c2, mode, &c2);
2990 n = real_to_integer (&c2);
2991 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2992 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2993 real_convert (&c2, mode, &c2);
2994 if (real_identical (&c2, &c)
2995 && ((!optimize_size
2996 && powi_cost (n/3) <= POWI_MAX_MULTS)
2997 || n == 1))
2999 tree call_expr = build_call_expr (fn, 1, narg0);
3000 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3001 if (abs (n) % 3 == 2)
3002 op = expand_simple_binop (mode, MULT, op, op, op,
3003 0, OPTAB_LIB_WIDEN);
3004 if (n != 1)
3006 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3007 op2 = force_reg (mode, op2);
3008 op2 = expand_powi (op2, mode, abs (n / 3));
3009 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3010 0, OPTAB_LIB_WIDEN);
3011 /* If the original exponent was negative, reciprocate the
3012 result. */
3013 if (n < 0)
3014 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3015 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3017 return op;
3021 /* Fall back to optab expansion. */
3022 return expand_builtin_mathfn_2 (exp, target, subtarget);
3025 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3026 a normal call should be emitted rather than expanding the function
3027 in-line. EXP is the expression that is a call to the builtin
3028 function; if convenient, the result should be placed in TARGET. */
3030 static rtx
3031 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3033 tree arg0, arg1;
3034 rtx op0, op1;
3035 enum machine_mode mode;
3036 enum machine_mode mode2;
3038 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3039 return NULL_RTX;
3041 arg0 = CALL_EXPR_ARG (exp, 0);
3042 arg1 = CALL_EXPR_ARG (exp, 1);
3043 mode = TYPE_MODE (TREE_TYPE (exp));
3045 /* Handle constant power. */
3047 if (TREE_CODE (arg1) == INTEGER_CST
3048 && !TREE_OVERFLOW (arg1))
3050 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3052 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3053 Otherwise, check the number of multiplications required. */
3054 if ((TREE_INT_CST_HIGH (arg1) == 0
3055 || TREE_INT_CST_HIGH (arg1) == -1)
3056 && ((n >= -1 && n <= 2)
3057 || (! optimize_size
3058 && powi_cost (n) <= POWI_MAX_MULTS)))
3060 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3061 op0 = force_reg (mode, op0);
3062 return expand_powi (op0, mode, n);
3066 /* Emit a libcall to libgcc. */
3068 /* Mode of the 2nd argument must match that of an int. */
3069 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3071 if (target == NULL_RTX)
3072 target = gen_reg_rtx (mode);
3074 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3075 if (GET_MODE (op0) != mode)
3076 op0 = convert_to_mode (mode, op0, 0);
3077 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3078 if (GET_MODE (op1) != mode2)
3079 op1 = convert_to_mode (mode2, op1, 0);
3081 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3082 target, LCT_CONST_MAKE_BLOCK, mode, 2,
3083 op0, mode, op1, mode2);
3085 return target;
3088 /* Expand expression EXP which is a call to the strlen builtin. Return
3089 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3090 try to get the result in TARGET, if convenient. */
3092 static rtx
3093 expand_builtin_strlen (tree exp, rtx target,
3094 enum machine_mode target_mode)
3096 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3097 return NULL_RTX;
3098 else
3100 rtx pat;
3101 tree len;
3102 tree src = CALL_EXPR_ARG (exp, 0);
3103 rtx result, src_reg, char_rtx, before_strlen;
3104 enum machine_mode insn_mode = target_mode, char_mode;
3105 enum insn_code icode = CODE_FOR_nothing;
3106 int align;
3108 /* If the length can be computed at compile-time, return it. */
3109 len = c_strlen (src, 0);
3110 if (len)
3111 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3113 /* If the length can be computed at compile-time and is a constant
3114 integer, but there are side-effects in src, evaluate
3115 src for side-effects, then return len.
3116 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3117 can be optimized into: i++; x = 3; */
3118 len = c_strlen (src, 1);
3119 if (len && TREE_CODE (len) == INTEGER_CST)
3121 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3122 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3125 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3127 /* If SRC is not a pointer type, don't do this operation inline. */
3128 if (align == 0)
3129 return NULL_RTX;
3131 /* Bail out if we can't compute strlen in the right mode. */
3132 while (insn_mode != VOIDmode)
3134 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3135 if (icode != CODE_FOR_nothing)
3136 break;
3138 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3140 if (insn_mode == VOIDmode)
3141 return NULL_RTX;
3143 /* Make a place to write the result of the instruction. */
3144 result = target;
3145 if (! (result != 0
3146 && REG_P (result)
3147 && GET_MODE (result) == insn_mode
3148 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3149 result = gen_reg_rtx (insn_mode);
3151 /* Make a place to hold the source address. We will not expand
3152 the actual source until we are sure that the expansion will
3153 not fail -- there are trees that cannot be expanded twice. */
3154 src_reg = gen_reg_rtx (Pmode);
3156 /* Mark the beginning of the strlen sequence so we can emit the
3157 source operand later. */
3158 before_strlen = get_last_insn ();
3160 char_rtx = const0_rtx;
3161 char_mode = insn_data[(int) icode].operand[2].mode;
3162 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3163 char_mode))
3164 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3166 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3167 char_rtx, GEN_INT (align));
3168 if (! pat)
3169 return NULL_RTX;
3170 emit_insn (pat);
3172 /* Now that we are assured of success, expand the source. */
3173 start_sequence ();
3174 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3175 if (pat != src_reg)
3176 emit_move_insn (src_reg, pat);
3177 pat = get_insns ();
3178 end_sequence ();
3180 if (before_strlen)
3181 emit_insn_after (pat, before_strlen);
3182 else
3183 emit_insn_before (pat, get_insns ());
3185 /* Return the value in the proper mode for this function. */
3186 if (GET_MODE (result) == target_mode)
3187 target = result;
3188 else if (target != 0)
3189 convert_move (target, result, 0);
3190 else
3191 target = convert_to_mode (target_mode, result, 0);
3193 return target;
3197 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed; the
3198 caller should emit a normal call. Otherwise try to get the result
3199 in TARGET, if convenient (and in mode MODE if that's convenient). */
3201 static rtx
3202 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3204 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3206 tree type = TREE_TYPE (exp);
3207 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3208 CALL_EXPR_ARG (exp, 1), type);
3209 if (result)
3210 return expand_expr (result, target, mode, EXPAND_NORMAL);
3212 return NULL_RTX;
3215 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3216 caller should emit a normal call. Otherwise try to get the result
3217 in TARGET, if convenient (and in mode MODE if that's convenient). */
3219 static rtx
3220 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3222 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3224 tree type = TREE_TYPE (exp);
3225 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3226 CALL_EXPR_ARG (exp, 1), type);
3227 if (result)
3228 return expand_expr (result, target, mode, EXPAND_NORMAL);
3230 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3232 return NULL_RTX;
3235 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3236 caller should emit a normal call. Otherwise try to get the result
3237 in TARGET, if convenient (and in mode MODE if that's convenient). */
3239 static rtx
3240 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3242 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3244 tree type = TREE_TYPE (exp);
3245 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3246 CALL_EXPR_ARG (exp, 1), type);
3247 if (result)
3248 return expand_expr (result, target, mode, EXPAND_NORMAL);
3250 return NULL_RTX;
3253 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3254 caller should emit a normal call. Otherwise try to get the result
3255 in TARGET, if convenient (and in mode MODE if that's convenient). */
3257 static rtx
3258 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3260 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3262 tree type = TREE_TYPE (exp);
3263 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3264 CALL_EXPR_ARG (exp, 1), type);
3265 if (result)
3266 return expand_expr (result, target, mode, EXPAND_NORMAL);
3268 return NULL_RTX;
3271 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3272 bytes from constant string DATA + OFFSET and return it as a target
3273 constant. */
3275 static rtx
3276 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3277 enum machine_mode mode)
3279 const char *str = (const char *) data;
3281 gcc_assert (offset >= 0
3282 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3283 <= strlen (str) + 1));
3285 return c_readstr (str + offset, mode);
3288 /* Expand a call EXP to the memcpy builtin.
3289 Return NULL_RTX if we failed; the caller should emit a normal call.
3290 Otherwise try to get the result in TARGET, if convenient (and in
3291 mode MODE if that's convenient). */
3293 static rtx
3294 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3296 tree fndecl = get_callee_fndecl (exp);
3298 if (!validate_arglist (exp,
3299 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3300 return NULL_RTX;
3301 else
3303 tree dest = CALL_EXPR_ARG (exp, 0);
3304 tree src = CALL_EXPR_ARG (exp, 1);
3305 tree len = CALL_EXPR_ARG (exp, 2);
3306 const char *src_str;
3307 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3308 unsigned int dest_align
3309 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3310 rtx dest_mem, src_mem, dest_addr, len_rtx;
3311 tree result = fold_builtin_memory_op (dest, src, len,
3312 TREE_TYPE (TREE_TYPE (fndecl)),
3313 false, /*endp=*/0);
3314 HOST_WIDE_INT expected_size = -1;
3315 unsigned int expected_align = 0;
3317 if (result)
3319 while (TREE_CODE (result) == COMPOUND_EXPR)
3321 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3322 EXPAND_NORMAL);
3323 result = TREE_OPERAND (result, 1);
3325 return expand_expr (result, target, mode, EXPAND_NORMAL);
3328 /* If DEST is not a pointer type, call the normal function. */
3329 if (dest_align == 0)
3330 return NULL_RTX;
3332 /* If SRC is not a pointer type, don't do this
3333 operation in-line. */
3334 if (src_align == 0)
3335 return NULL_RTX;
3337 stringop_block_profile (exp, &expected_align, &expected_size);
3338 if (expected_align < dest_align)
3339 expected_align = dest_align;
3340 dest_mem = get_memory_rtx (dest, len);
3341 set_mem_align (dest_mem, dest_align);
3342 len_rtx = expand_normal (len);
3343 src_str = c_getstr (src);
3345 /* If SRC is a string constant and block move would be done
3346 by pieces, we can avoid loading the string from memory
3347 and only store the computed constants. */
3348 if (src_str
3349 && GET_CODE (len_rtx) == CONST_INT
3350 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3351 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3352 (void *) src_str, dest_align, false))
3354 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3355 builtin_memcpy_read_str,
3356 (void *) src_str, dest_align, false, 0);
3357 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3358 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3359 return dest_mem;
3362 src_mem = get_memory_rtx (src, len);
3363 set_mem_align (src_mem, src_align);
3365 /* Copy word part most expediently. */
3366 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3367 CALL_EXPR_TAILCALL (exp)
3368 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3369 expected_align, expected_size);
3371 if (dest_addr == 0)
3373 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3374 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3376 return dest_addr;
3380 /* Expand a call EXP to the mempcpy builtin.
3381 Return NULL_RTX if we failed; the caller should emit a normal call.
3382 Otherwise try to get the result in TARGET, if convenient (and in
3383 mode MODE if that's convenient). If ENDP is 0 return the
3384 destination pointer, if ENDP is 1 return the end pointer ala
3385 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3386 stpcpy. */
3388 static rtx
3389 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3391 if (!validate_arglist (exp,
3392 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3393 return NULL_RTX;
3394 else
3396 tree dest = CALL_EXPR_ARG (exp, 0);
3397 tree src = CALL_EXPR_ARG (exp, 1);
3398 tree len = CALL_EXPR_ARG (exp, 2);
3399 return expand_builtin_mempcpy_args (dest, src, len,
3400 TREE_TYPE (exp),
3401 target, mode, /*endp=*/ 1);
3405 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3406 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3407 so that this can also be called without constructing an actual CALL_EXPR.
3408 TYPE is the return type of the call. The other arguments and return value
3409 are the same as for expand_builtin_mempcpy. */
3411 static rtx
3412 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3413 rtx target, enum machine_mode mode, int endp)
3415 /* If return value is ignored, transform mempcpy into memcpy. */
3416 if (target == const0_rtx)
3418 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3420 if (!fn)
3421 return NULL_RTX;
3423 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3424 target, mode, EXPAND_NORMAL);
3426 else
3428 const char *src_str;
3429 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3430 unsigned int dest_align
3431 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3432 rtx dest_mem, src_mem, len_rtx;
3433 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3435 if (result)
3437 while (TREE_CODE (result) == COMPOUND_EXPR)
3439 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3440 EXPAND_NORMAL);
3441 result = TREE_OPERAND (result, 1);
3443 return expand_expr (result, target, mode, EXPAND_NORMAL);
3446 /* If either SRC or DEST is not a pointer type, don't do this
3447 operation in-line. */
3448 if (dest_align == 0 || src_align == 0)
3449 return NULL_RTX;
3451 /* If LEN is not constant, call the normal function. */
3452 if (! host_integerp (len, 1))
3453 return NULL_RTX;
3455 len_rtx = expand_normal (len);
3456 src_str = c_getstr (src);
3458 /* If SRC is a string constant and block move would be done
3459 by pieces, we can avoid loading the string from memory
3460 and only store the computed constants. */
3461 if (src_str
3462 && GET_CODE (len_rtx) == CONST_INT
3463 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3464 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3465 (void *) src_str, dest_align, false))
3467 dest_mem = get_memory_rtx (dest, len);
3468 set_mem_align (dest_mem, dest_align);
3469 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3470 builtin_memcpy_read_str,
3471 (void *) src_str, dest_align,
3472 false, endp);
3473 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3474 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3475 return dest_mem;
3478 if (GET_CODE (len_rtx) == CONST_INT
3479 && can_move_by_pieces (INTVAL (len_rtx),
3480 MIN (dest_align, src_align)))
3482 dest_mem = get_memory_rtx (dest, len);
3483 set_mem_align (dest_mem, dest_align);
3484 src_mem = get_memory_rtx (src, len);
3485 set_mem_align (src_mem, src_align);
3486 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3487 MIN (dest_align, src_align), endp);
3488 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3489 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3490 return dest_mem;
3493 return NULL_RTX;
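/* Illustrative sketch (hypothetical destination): the ENDP convention used
   above at the source level.  mempcpy returns the end pointer, so when its
   result is unused the call is rewritten as plain memcpy, exactly as the
   target == const0_rtx case does.  */
#if 0
static void
example_mempcpy (char *dst)
{
  char *end = mempcpy (dst, "abc", 4);  /* end == dst + 4 (ENDP == 1)     */
  mempcpy (dst, "abc", 4);              /* result unused: becomes memcpy  */
}
#endif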
3497 /* Expand expression EXP, which is a call to the memmove builtin. Return
3498 NULL_RTX if we failed; the caller should emit a normal call. */
3500 static rtx
3501 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3503 if (!validate_arglist (exp,
3504 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3505 return NULL_RTX;
3506 else
3508 tree dest = CALL_EXPR_ARG (exp, 0);
3509 tree src = CALL_EXPR_ARG (exp, 1);
3510 tree len = CALL_EXPR_ARG (exp, 2);
3511 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3512 target, mode, ignore);
3516 /* Helper function to do the actual work for expand_builtin_memmove. The
3517 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3518 so that this can also be called without constructing an actual CALL_EXPR.
3519 TYPE is the return type of the call. The other arguments and return value
3520 are the same as for expand_builtin_memmove. */
3522 static rtx
3523 expand_builtin_memmove_args (tree dest, tree src, tree len,
3524 tree type, rtx target, enum machine_mode mode,
3525 int ignore)
3527 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3529 if (result)
3531 STRIP_TYPE_NOPS (result);
3532 while (TREE_CODE (result) == COMPOUND_EXPR)
3534 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3535 EXPAND_NORMAL);
3536 result = TREE_OPERAND (result, 1);
3538 return expand_expr (result, target, mode, EXPAND_NORMAL);
3541 /* Otherwise, call the normal function. */
3542 return NULL_RTX;
3545 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3546 NULL_RTX if we failed; the caller should emit a normal call. */
3548 static rtx
3549 expand_builtin_bcopy (tree exp, int ignore)
3551 tree type = TREE_TYPE (exp);
3552 tree src, dest, size;
3554 if (!validate_arglist (exp,
3555 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3556 return NULL_RTX;
3558 src = CALL_EXPR_ARG (exp, 0);
3559 dest = CALL_EXPR_ARG (exp, 1);
3560 size = CALL_EXPR_ARG (exp, 2);
3562 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3563 This is done this way so that if it isn't expanded inline, we fall
3564 back to calling bcopy instead of memmove. */
3565 return expand_builtin_memmove_args (dest, src,
3566 fold_convert (sizetype, size),
3567 type, const0_rtx, VOIDmode,
3568 ignore);
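/* Illustrative sketch (hypothetical arguments): the operand swap performed
   above.  bcopy takes (src, dest, len) while memmove takes (dest, src, len),
   and the length is converted to size_t.  */
#if 0
static void
example_bcopy (void *from, void *to, int n)
{
  bcopy (from, to, n);             /* is expanded, in effect, as ...  */
  memmove (to, from, (size_t) n);
}
#endif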
3571 #ifndef HAVE_movstr
3572 # define HAVE_movstr 0
3573 # define CODE_FOR_movstr CODE_FOR_nothing
3574 #endif
3576 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3577 we failed, the caller should emit a normal call, otherwise try to
3578 get the result in TARGET, if convenient. If ENDP is 0 return the
3579 destination pointer, if ENDP is 1 return the end pointer a la
3580 mempcpy, and if ENDP is 2 return the end pointer minus one a la
3581 stpcpy. */
3583 static rtx
3584 expand_movstr (tree dest, tree src, rtx target, int endp)
3586 rtx end;
3587 rtx dest_mem;
3588 rtx src_mem;
3589 rtx insn;
3590 const struct insn_data * data;
3592 if (!HAVE_movstr)
3593 return NULL_RTX;
3595 dest_mem = get_memory_rtx (dest, NULL);
3596 src_mem = get_memory_rtx (src, NULL);
3597 if (!endp)
3599 target = force_reg (Pmode, XEXP (dest_mem, 0));
3600 dest_mem = replace_equiv_address (dest_mem, target);
3601 end = gen_reg_rtx (Pmode);
3603 else
3605 if (target == 0 || target == const0_rtx)
3607 end = gen_reg_rtx (Pmode);
3608 if (target == 0)
3609 target = end;
3611 else
3612 end = target;
3615 data = insn_data + CODE_FOR_movstr;
3617 if (data->operand[0].mode != VOIDmode)
3618 end = gen_lowpart (data->operand[0].mode, end);
3620 insn = data->genfun (end, dest_mem, src_mem);
3622 gcc_assert (insn);
3624 emit_insn (insn);
3626 /* movstr is supposed to set end to the address of the NUL
3627 terminator. If the caller requested a mempcpy-like return value,
3628 adjust it. */
3629 if (endp == 1 && target != const0_rtx)
3631 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3632 emit_move_insn (target, force_operand (tem, NULL_RTX));
3635 return target;
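/* Illustrative sketch (hypothetical buffer): the ENDP adjustment above.
   movstr leaves END at the copied NUL terminator, which is already the
   stpcpy return value (ENDP == 2); a mempcpy-style caller (ENDP == 1)
   wants the address one past the NUL, hence the plus_constant (..., 1).  */
#if 0
static void
example_endp (char d[8])
{
  char *p = stpcpy (d, "hi");          /* p == d + 2, points at the '\0' */
  char *q = mempcpy (d, "hi", 3);      /* q == d + 3, one past the '\0'  */
}
#endif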
3638 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3639 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3640 try to get the result in TARGET, if convenient (and in mode MODE if that's
3641 convenient). */
3643 static rtx
3644 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3646 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3648 tree dest = CALL_EXPR_ARG (exp, 0);
3649 tree src = CALL_EXPR_ARG (exp, 1);
3650 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3652 return NULL_RTX;
3655 /* Helper function to do the actual work for expand_builtin_strcpy. The
3656 arguments to the builtin_strcpy call DEST and SRC are broken out
3657 so that this can also be called without constructing an actual CALL_EXPR.
3658 The other arguments and return value are the same as for
3659 expand_builtin_strcpy. */
3661 static rtx
3662 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3663 rtx target, enum machine_mode mode)
3665 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3666 if (result)
3667 return expand_expr (result, target, mode, EXPAND_NORMAL);
3668 return expand_movstr (dest, src, target, /*endp=*/0);
3672 /* Expand a call EXP to the stpcpy builtin.
3673 Return NULL_RTX if we failed; the caller should emit a normal call,
3674 otherwise try to get the result in TARGET, if convenient (and in
3675 mode MODE if that's convenient). */
3677 static rtx
3678 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3680 tree dst, src;
3682 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3683 return NULL_RTX;
3685 dst = CALL_EXPR_ARG (exp, 0);
3686 src = CALL_EXPR_ARG (exp, 1);
3688 /* If return value is ignored, transform stpcpy into strcpy. */
3689 if (target == const0_rtx)
3691 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3692 if (!fn)
3693 return NULL_RTX;
3695 return expand_expr (build_call_expr (fn, 2, dst, src),
3696 target, mode, EXPAND_NORMAL);
3698 else
3700 tree len, lenp1;
3701 rtx ret;
3703 /* Ensure we get an actual string whose length can be evaluated at
3704 compile-time, not an expression containing a string. This is
3705 because the latter will potentially produce pessimized code
3706 when used to compute the return value. */
3707 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3708 return expand_movstr (dst, src, target, /*endp=*/2);
3710 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3711 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3712 target, mode, /*endp=*/2);
3714 if (ret)
3715 return ret;
3717 if (TREE_CODE (len) == INTEGER_CST)
3719 rtx len_rtx = expand_normal (len);
3721 if (GET_CODE (len_rtx) == CONST_INT)
3723 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3724 dst, src, target, mode);
3726 if (ret)
3728 if (! target)
3730 if (mode != VOIDmode)
3731 target = gen_reg_rtx (mode);
3732 else
3733 target = gen_reg_rtx (GET_MODE (ret));
3735 if (GET_MODE (target) != GET_MODE (ret))
3736 ret = gen_lowpart (GET_MODE (target), ret);
3738 ret = plus_constant (ret, INTVAL (len_rtx));
3739 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3740 gcc_assert (ret);
3742 return target;
3747 return expand_movstr (dst, src, target, /*endp=*/2);
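/* Illustrative sketch (hypothetical destination): the constant-length path
   above.  When strlen (SRC) is known at compile time, stpcpy (dst, src) is
   expanded as mempcpy (dst, src, strlen (src) + 1) with ENDP == 2, so the
   value returned is a pointer to the copied NUL.  */
#if 0
static void
example_stpcpy (char *dst)
{
  char *p = stpcpy (dst, "abc");                   /* p == dst + 3     */
  char *q = (char *) mempcpy (dst, "abc", 4) - 1;  /* same value as p  */
}
#endif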
3751 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3752 bytes from constant string DATA + OFFSET and return it as target
3753 constant. */
3756 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3757 enum machine_mode mode)
3759 const char *str = (const char *) data;
3761 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3762 return const0_rtx;
3764 return c_readstr (str + offset, mode);
3767 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3768 NULL_RTX if we failed; the caller should emit a normal call. */
3770 static rtx
3771 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3773 tree fndecl = get_callee_fndecl (exp);
3775 if (validate_arglist (exp,
3776 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3778 tree dest = CALL_EXPR_ARG (exp, 0);
3779 tree src = CALL_EXPR_ARG (exp, 1);
3780 tree len = CALL_EXPR_ARG (exp, 2);
3781 tree slen = c_strlen (src, 1);
3782 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3784 if (result)
3786 while (TREE_CODE (result) == COMPOUND_EXPR)
3788 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3789 EXPAND_NORMAL);
3790 result = TREE_OPERAND (result, 1);
3792 return expand_expr (result, target, mode, EXPAND_NORMAL);
3795 /* We must be passed a constant len and src parameter. */
3796 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3797 return NULL_RTX;
3799 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3801 /* We're required to pad with trailing zeros if the requested
3802 len is greater than strlen(s2)+1. In that case try to
3803 use store_by_pieces; if it fails, punt. */
3804 if (tree_int_cst_lt (slen, len))
3806 unsigned int dest_align
3807 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3808 const char *p = c_getstr (src);
3809 rtx dest_mem;
3811 if (!p || dest_align == 0 || !host_integerp (len, 1)
3812 || !can_store_by_pieces (tree_low_cst (len, 1),
3813 builtin_strncpy_read_str,
3814 (void *) p, dest_align, false))
3815 return NULL_RTX;
3817 dest_mem = get_memory_rtx (dest, len);
3818 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3819 builtin_strncpy_read_str,
3820 (void *) p, dest_align, false, 0);
3821 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3822 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3823 return dest_mem;
3826 return NULL_RTX;
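/* Illustrative sketch (hypothetical sizes): the padding case handled above.
   When LEN exceeds strlen (SRC) + 1, strncpy must zero-fill the remainder
   of DEST; builtin_strncpy_read_str supplies those zeros by returning
   const0_rtx for offsets past the end of the string.  */
#if 0
static void
example_strncpy (char dst[8])
{
  strncpy (dst, "ab", 8);   /* stores 'a', 'b', then six '\0' bytes */
}
#endif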
3829 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3830 bytes from constant string DATA + OFFSET and return it as target
3831 constant. */
3834 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3835 enum machine_mode mode)
3837 const char *c = (const char *) data;
3838 char *p = alloca (GET_MODE_SIZE (mode));
3840 memset (p, *c, GET_MODE_SIZE (mode));
3842 return c_readstr (p, mode);
3845 /* Callback routine for store_by_pieces. Return the RTL of a register
3846 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3847 char value given in the RTL register data. For example, if mode is
3848 4 bytes wide, return the RTL for 0x01010101*data. */
3850 static rtx
3851 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3852 enum machine_mode mode)
3854 rtx target, coeff;
3855 size_t size;
3856 char *p;
3858 size = GET_MODE_SIZE (mode);
3859 if (size == 1)
3860 return (rtx) data;
3862 p = alloca (size);
3863 memset (p, 1, size);
3864 coeff = c_readstr (p, mode);
3866 target = convert_to_mode (mode, (rtx) data, 1);
3867 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3868 return force_reg (mode, target);
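/* Illustrative sketch (assumes a 4-byte mode, hypothetical helper name):
   the replication builtin_memset_gen_str performs, written as ordinary C.
   Multiplying the zero-extended fill byte by a constant whose bytes are all
   1 copies that byte into every byte position of the word.  */
#if 0
static unsigned int
replicate_byte_32 (unsigned char c)
{
  return (unsigned int) c * 0x01010101u;   /* 0xab -> 0xabababab */
}
#endif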
3871 /* Expand expression EXP, which is a call to the memset builtin. Return
3872 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3873 try to get the result in TARGET, if convenient (and in mode MODE if that's
3874 convenient). */
3876 static rtx
3877 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3879 if (!validate_arglist (exp,
3880 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3881 return NULL_RTX;
3882 else
3884 tree dest = CALL_EXPR_ARG (exp, 0);
3885 tree val = CALL_EXPR_ARG (exp, 1);
3886 tree len = CALL_EXPR_ARG (exp, 2);
3887 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3891 /* Helper function to do the actual work for expand_builtin_memset. The
3892 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3893 so that this can also be called without constructing an actual CALL_EXPR.
3894 The other arguments and return value are the same as for
3895 expand_builtin_memset. */
3897 static rtx
3898 expand_builtin_memset_args (tree dest, tree val, tree len,
3899 rtx target, enum machine_mode mode, tree orig_exp)
3901 tree fndecl, fn;
3902 enum built_in_function fcode;
3903 char c;
3904 unsigned int dest_align;
3905 rtx dest_mem, dest_addr, len_rtx;
3906 HOST_WIDE_INT expected_size = -1;
3907 unsigned int expected_align = 0;
3909 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3911 /* If DEST is not a pointer type, don't do this operation in-line. */
3912 if (dest_align == 0)
3913 return NULL_RTX;
3915 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3916 if (expected_align < dest_align)
3917 expected_align = dest_align;
3919 /* If the LEN parameter is zero, return DEST. */
3920 if (integer_zerop (len))
3922 /* Evaluate and ignore VAL in case it has side-effects. */
3923 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3924 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3927 /* Stabilize the arguments in case we fail. */
3928 dest = builtin_save_expr (dest);
3929 val = builtin_save_expr (val);
3930 len = builtin_save_expr (len);
3932 len_rtx = expand_normal (len);
3933 dest_mem = get_memory_rtx (dest, len);
3935 if (TREE_CODE (val) != INTEGER_CST)
3937 rtx val_rtx;
3939 val_rtx = expand_normal (val);
3940 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3941 val_rtx, 0);
3943 /* Assume that we can memset by pieces if we can store
3944 the coefficients by pieces (in the required modes).
3945 We can't pass builtin_memset_gen_str as that emits RTL. */
3946 c = 1;
3947 if (host_integerp (len, 1)
3948 && can_store_by_pieces (tree_low_cst (len, 1),
3949 builtin_memset_read_str, &c, dest_align,
3950 true))
3952 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3953 val_rtx);
3954 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3955 builtin_memset_gen_str, val_rtx, dest_align,
3956 true, 0);
3958 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3959 dest_align, expected_align,
3960 expected_size))
3961 goto do_libcall;
3963 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3964 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3965 return dest_mem;
3968 if (target_char_cast (val, &c))
3969 goto do_libcall;
3971 if (c)
3973 if (host_integerp (len, 1)
3974 && can_store_by_pieces (tree_low_cst (len, 1),
3975 builtin_memset_read_str, &c, dest_align,
3976 true))
3977 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3978 builtin_memset_read_str, &c, dest_align, true, 0);
3979 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3980 dest_align, expected_align,
3981 expected_size))
3982 goto do_libcall;
3984 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3985 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3986 return dest_mem;
3989 set_mem_align (dest_mem, dest_align);
3990 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3991 CALL_EXPR_TAILCALL (orig_exp)
3992 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3993 expected_align, expected_size);
3995 if (dest_addr == 0)
3997 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3998 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4001 return dest_addr;
4003 do_libcall:
4004 fndecl = get_callee_fndecl (orig_exp);
4005 fcode = DECL_FUNCTION_CODE (fndecl);
4006 if (fcode == BUILT_IN_MEMSET)
4007 fn = build_call_expr (fndecl, 3, dest, val, len);
4008 else if (fcode == BUILT_IN_BZERO)
4009 fn = build_call_expr (fndecl, 2, dest, len);
4010 else
4011 gcc_unreachable ();
4012 if (TREE_CODE (fn) == CALL_EXPR)
4013 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4014 return expand_call (fn, target, target == const0_rtx);
4017 /* Expand expression EXP, which is a call to the bzero builtin. Return
4018 NULL_RTX if we failed; the caller should emit a normal call. */
4020 static rtx
4021 expand_builtin_bzero (tree exp)
4023 tree dest, size;
4025 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4026 return NULL_RTX;
4028 dest = CALL_EXPR_ARG (exp, 0);
4029 size = CALL_EXPR_ARG (exp, 1);
4031 /* Transform bzero(ptr x, int y) to
4032 memset(ptr x, int 0, size_t y). This is done this way
4033 so that if it isn't expanded inline, we fall back to
4034 calling bzero instead of memset. */
4036 return expand_builtin_memset_args (dest, integer_zero_node,
4037 fold_convert (sizetype, size),
4038 const0_rtx, VOIDmode, exp);
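/* Illustrative sketch (hypothetical arguments): the rewrite performed
   above.  Passing the original EXP keeps the bzero FNDECL around, so if
   the inline expansion fails the emitted library call is still bzero.  */
#if 0
static void
example_bzero (void *buf, int n)
{
  bzero (buf, n);                  /* is expanded, in effect, as ...  */
  memset (buf, 0, (size_t) n);
}
#endif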
4041 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed; the
4042 caller should emit a normal call, otherwise try to get the result
4043 in TARGET, if convenient (and in mode MODE if that's convenient). */
4045 static rtx
4046 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4048 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4049 INTEGER_TYPE, VOID_TYPE))
4051 tree type = TREE_TYPE (exp);
4052 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4053 CALL_EXPR_ARG (exp, 1),
4054 CALL_EXPR_ARG (exp, 2), type);
4055 if (result)
4056 return expand_expr (result, target, mode, EXPAND_NORMAL);
4058 return NULL_RTX;
4061 /* Expand expression EXP, which is a call to the memcmp built-in function.
4062 Return NULL_RTX if we failed; the
4063 caller should emit a normal call, otherwise try to get the result in
4064 TARGET, if convenient (and in mode MODE, if that's convenient). */
4066 static rtx
4067 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4069 if (!validate_arglist (exp,
4070 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4071 return NULL_RTX;
4072 else
4074 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4075 CALL_EXPR_ARG (exp, 1),
4076 CALL_EXPR_ARG (exp, 2));
4077 if (result)
4078 return expand_expr (result, target, mode, EXPAND_NORMAL);
4081 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4083 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4084 rtx result;
4085 rtx insn;
4086 tree arg1 = CALL_EXPR_ARG (exp, 0);
4087 tree arg2 = CALL_EXPR_ARG (exp, 1);
4088 tree len = CALL_EXPR_ARG (exp, 2);
4090 int arg1_align
4091 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4092 int arg2_align
4093 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4094 enum machine_mode insn_mode;
4096 #ifdef HAVE_cmpmemsi
4097 if (HAVE_cmpmemsi)
4098 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4099 else
4100 #endif
4101 #ifdef HAVE_cmpstrnsi
4102 if (HAVE_cmpstrnsi)
4103 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4104 else
4105 #endif
4106 return NULL_RTX;
4108 /* If either argument is not of pointer type, call the function. */
4109 if (arg1_align == 0 || arg2_align == 0)
4110 return NULL_RTX;
4112 /* Make a place to write the result of the instruction. */
4113 result = target;
4114 if (! (result != 0
4115 && REG_P (result) && GET_MODE (result) == insn_mode
4116 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4117 result = gen_reg_rtx (insn_mode);
4119 arg1_rtx = get_memory_rtx (arg1, len);
4120 arg2_rtx = get_memory_rtx (arg2, len);
4121 arg3_rtx = expand_normal (len);
4123 /* Set MEM_SIZE as appropriate. */
4124 if (GET_CODE (arg3_rtx) == CONST_INT)
4126 set_mem_size (arg1_rtx, arg3_rtx);
4127 set_mem_size (arg2_rtx, arg3_rtx);
4130 #ifdef HAVE_cmpmemsi
4131 if (HAVE_cmpmemsi)
4132 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4133 GEN_INT (MIN (arg1_align, arg2_align)));
4134 else
4135 #endif
4136 #ifdef HAVE_cmpstrnsi
4137 if (HAVE_cmpstrnsi)
4138 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4139 GEN_INT (MIN (arg1_align, arg2_align)));
4140 else
4141 #endif
4142 gcc_unreachable ();
4144 if (insn)
4145 emit_insn (insn);
4146 else
4147 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
4148 TYPE_MODE (integer_type_node), 3,
4149 XEXP (arg1_rtx, 0), Pmode,
4150 XEXP (arg2_rtx, 0), Pmode,
4151 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4152 TYPE_UNSIGNED (sizetype)),
4153 TYPE_MODE (sizetype));
4155 /* Return the value in the proper mode for this function. */
4156 mode = TYPE_MODE (TREE_TYPE (exp));
4157 if (GET_MODE (result) == mode)
4158 return result;
4159 else if (target != 0)
4161 convert_move (target, result, 0);
4162 return target;
4164 else
4165 return convert_to_mode (mode, result, 0);
4167 #endif
4169 return NULL_RTX;
4172 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4173 if we failed; the caller should emit a normal call, otherwise try to get
4174 the result in TARGET, if convenient. */
4176 static rtx
4177 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4179 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4180 return NULL_RTX;
4181 else
4183 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4184 CALL_EXPR_ARG (exp, 1));
4185 if (result)
4186 return expand_expr (result, target, mode, EXPAND_NORMAL);
4189 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4190 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4191 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4193 rtx arg1_rtx, arg2_rtx;
4194 rtx result, insn = NULL_RTX;
4195 tree fndecl, fn;
4196 tree arg1 = CALL_EXPR_ARG (exp, 0);
4197 tree arg2 = CALL_EXPR_ARG (exp, 1);
4199 int arg1_align
4200 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4201 int arg2_align
4202 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4204 /* If either argument is not of pointer type, call the function. */
4205 if (arg1_align == 0 || arg2_align == 0)
4206 return NULL_RTX;
4208 /* Stabilize the arguments in case gen_cmpstr(n)si fails. */
4209 arg1 = builtin_save_expr (arg1);
4210 arg2 = builtin_save_expr (arg2);
4212 arg1_rtx = get_memory_rtx (arg1, NULL);
4213 arg2_rtx = get_memory_rtx (arg2, NULL);
4215 #ifdef HAVE_cmpstrsi
4216 /* Try to call cmpstrsi. */
4217 if (HAVE_cmpstrsi)
4219 enum machine_mode insn_mode
4220 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4222 /* Make a place to write the result of the instruction. */
4223 result = target;
4224 if (! (result != 0
4225 && REG_P (result) && GET_MODE (result) == insn_mode
4226 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4227 result = gen_reg_rtx (insn_mode);
4229 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4230 GEN_INT (MIN (arg1_align, arg2_align)));
4232 #endif
4233 #ifdef HAVE_cmpstrnsi
4234 /* Try to determine at least one length and call cmpstrnsi. */
4235 if (!insn && HAVE_cmpstrnsi)
4237 tree len;
4238 rtx arg3_rtx;
4240 enum machine_mode insn_mode
4241 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4242 tree len1 = c_strlen (arg1, 1);
4243 tree len2 = c_strlen (arg2, 1);
4245 if (len1)
4246 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4247 if (len2)
4248 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4250 /* If we don't have a constant length for the first, use the length
4251 of the second, if we know it. We don't require a constant for
4252 this case; some cost analysis could be done if both are available
4253 but neither is constant. For now, assume they're equally cheap,
4254 unless one has side effects. If both strings have constant lengths,
4255 use the smaller. */
4257 if (!len1)
4258 len = len2;
4259 else if (!len2)
4260 len = len1;
4261 else if (TREE_SIDE_EFFECTS (len1))
4262 len = len2;
4263 else if (TREE_SIDE_EFFECTS (len2))
4264 len = len1;
4265 else if (TREE_CODE (len1) != INTEGER_CST)
4266 len = len2;
4267 else if (TREE_CODE (len2) != INTEGER_CST)
4268 len = len1;
4269 else if (tree_int_cst_lt (len1, len2))
4270 len = len1;
4271 else
4272 len = len2;
4274 /* If both arguments have side effects, we cannot optimize. */
4275 if (!len || TREE_SIDE_EFFECTS (len))
4276 goto do_libcall;
4278 arg3_rtx = expand_normal (len);
4280 /* Make a place to write the result of the instruction. */
4281 result = target;
4282 if (! (result != 0
4283 && REG_P (result) && GET_MODE (result) == insn_mode
4284 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4285 result = gen_reg_rtx (insn_mode);
4287 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4288 GEN_INT (MIN (arg1_align, arg2_align)));
4290 #endif
4292 if (insn)
4294 emit_insn (insn);
4296 /* Return the value in the proper mode for this function. */
4297 mode = TYPE_MODE (TREE_TYPE (exp));
4298 if (GET_MODE (result) == mode)
4299 return result;
4300 if (target == 0)
4301 return convert_to_mode (mode, result, 0);
4302 convert_move (target, result, 0);
4303 return target;
4306 /* Expand the library call ourselves using a stabilized argument
4307 list to avoid evaluating the function's arguments twice. */
4308 #ifdef HAVE_cmpstrnsi
4309 do_libcall:
4310 #endif
4311 fndecl = get_callee_fndecl (exp);
4312 fn = build_call_expr (fndecl, 2, arg1, arg2);
4313 if (TREE_CODE (fn) == CALL_EXPR)
4314 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4315 return expand_call (fn, target, target == const0_rtx);
4317 #endif
4318 return NULL_RTX;
4321 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4322 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4323 the result in TARGET, if convenient. */
4325 static rtx
4326 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4328 if (!validate_arglist (exp,
4329 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4330 return NULL_RTX;
4331 else
4333 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4334 CALL_EXPR_ARG (exp, 1),
4335 CALL_EXPR_ARG (exp, 2));
4336 if (result)
4337 return expand_expr (result, target, mode, EXPAND_NORMAL);
4340 /* If c_strlen can determine an expression for one of the string
4341 lengths, and it doesn't have side effects, then emit cmpstrnsi
4342 using length MIN(strlen(string)+1, arg3). */
4343 #ifdef HAVE_cmpstrnsi
4344 if (HAVE_cmpstrnsi)
4346 tree len, len1, len2;
4347 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4348 rtx result, insn;
4349 tree fndecl, fn;
4350 tree arg1 = CALL_EXPR_ARG (exp, 0);
4351 tree arg2 = CALL_EXPR_ARG (exp, 1);
4352 tree arg3 = CALL_EXPR_ARG (exp, 2);
4354 int arg1_align
4355 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4356 int arg2_align
4357 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4358 enum machine_mode insn_mode
4359 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4361 len1 = c_strlen (arg1, 1);
4362 len2 = c_strlen (arg2, 1);
4364 if (len1)
4365 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4366 if (len2)
4367 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4369 /* If we don't have a constant length for the first, use the length
4370 of the second, if we know it. We don't require a constant for
4371 this case; some cost analysis could be done if both are available
4372 but neither is constant. For now, assume they're equally cheap,
4373 unless one has side effects. If both strings have constant lengths,
4374 use the smaller. */
4376 if (!len1)
4377 len = len2;
4378 else if (!len2)
4379 len = len1;
4380 else if (TREE_SIDE_EFFECTS (len1))
4381 len = len2;
4382 else if (TREE_SIDE_EFFECTS (len2))
4383 len = len1;
4384 else if (TREE_CODE (len1) != INTEGER_CST)
4385 len = len2;
4386 else if (TREE_CODE (len2) != INTEGER_CST)
4387 len = len1;
4388 else if (tree_int_cst_lt (len1, len2))
4389 len = len1;
4390 else
4391 len = len2;
4393 /* If both arguments have side effects, we cannot optimize. */
4394 if (!len || TREE_SIDE_EFFECTS (len))
4395 return NULL_RTX;
4397 /* The actual new length parameter is MIN(len,arg3). */
4398 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4399 fold_convert (TREE_TYPE (len), arg3));
4401 /* If either argument is not of pointer type, call the function. */
4402 if (arg1_align == 0 || arg2_align == 0)
4403 return NULL_RTX;
4405 /* Make a place to write the result of the instruction. */
4406 result = target;
4407 if (! (result != 0
4408 && REG_P (result) && GET_MODE (result) == insn_mode
4409 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4410 result = gen_reg_rtx (insn_mode);
4412 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4413 arg1 = builtin_save_expr (arg1);
4414 arg2 = builtin_save_expr (arg2);
4415 len = builtin_save_expr (len);
4417 arg1_rtx = get_memory_rtx (arg1, len);
4418 arg2_rtx = get_memory_rtx (arg2, len);
4419 arg3_rtx = expand_normal (len);
4420 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4421 GEN_INT (MIN (arg1_align, arg2_align)));
4422 if (insn)
4424 emit_insn (insn);
4426 /* Return the value in the proper mode for this function. */
4427 mode = TYPE_MODE (TREE_TYPE (exp));
4428 if (GET_MODE (result) == mode)
4429 return result;
4430 if (target == 0)
4431 return convert_to_mode (mode, result, 0);
4432 convert_move (target, result, 0);
4433 return target;
4436 /* Expand the library call ourselves using a stabilized argument
4437 list to avoid evaluating the function's arguments twice. */
4438 fndecl = get_callee_fndecl (exp);
4439 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4440 if (TREE_CODE (fn) == CALL_EXPR)
4441 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4442 return expand_call (fn, target, target == const0_rtx);
4444 #endif
4445 return NULL_RTX;
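/* Illustrative sketch (hypothetical first argument): the length bound used
   above.  A comparison against a string literal can stop at the literal's
   NUL, so the cmpstrnsi length becomes MIN (strlen (literal) + 1, N).  */
#if 0
static int
example_strncmp (const char *s)
{
  return strncmp (s, "abc", 100);   /* compared with length MIN (4, 100) */
}
#endif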
4448 /* Expand expression EXP, which is a call to the strcat builtin.
4449 Return NULL_RTX if we failed; the caller should emit a normal call,
4450 otherwise try to get the result in TARGET, if convenient. */
4452 static rtx
4453 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4455 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4456 return NULL_RTX;
4457 else
4459 tree dst = CALL_EXPR_ARG (exp, 0);
4460 tree src = CALL_EXPR_ARG (exp, 1);
4461 const char *p = c_getstr (src);
4463 /* If the string length is zero, return the dst parameter. */
4464 if (p && *p == '\0')
4465 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4467 if (!optimize_size)
4469 /* See if we can store by pieces into (dst + strlen(dst)). */
4470 tree newsrc, newdst,
4471 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4472 rtx insns;
4474 /* Stabilize the argument list. */
4475 newsrc = builtin_save_expr (src);
4476 dst = builtin_save_expr (dst);
4478 start_sequence ();
4480 /* Create strlen (dst). */
4481 newdst = build_call_expr (strlen_fn, 1, dst);
4482 /* Create (dst p+ strlen (dst)). */
4484 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4485 newdst = builtin_save_expr (newdst);
4487 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4489 end_sequence (); /* Stop sequence. */
4490 return NULL_RTX;
4493 /* Output the entire sequence. */
4494 insns = get_insns ();
4495 end_sequence ();
4496 emit_insn (insns);
4498 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4501 return NULL_RTX;
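/* Illustrative sketch (hypothetical destination): what the !optimize_size
   path above builds.  The append is rewritten as a copy to
   dst + strlen (dst), with DST stabilized so it is evaluated only once.  */
#if 0
static void
example_strcat (char *dst)
{
  strcat (dst, "xy");                 /* is expanded, in effect, as ...  */
  strcpy (dst + strlen (dst), "xy");
}
#endif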
4505 /* Expand expression EXP, which is a call to the strncat builtin.
4506 Return NULL_RTX if we failed; the caller should emit a normal call,
4507 otherwise try to get the result in TARGET, if convenient. */
4509 static rtx
4510 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4512 if (validate_arglist (exp,
4513 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4515 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4516 CALL_EXPR_ARG (exp, 1),
4517 CALL_EXPR_ARG (exp, 2));
4518 if (result)
4519 return expand_expr (result, target, mode, EXPAND_NORMAL);
4521 return NULL_RTX;
4524 /* Expand expression EXP, which is a call to the strspn builtin.
4525 Return NULL_RTX if we failed; the caller should emit a normal call,
4526 otherwise try to get the result in TARGET, if convenient. */
4528 static rtx
4529 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4531 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4533 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4534 CALL_EXPR_ARG (exp, 1));
4535 if (result)
4536 return expand_expr (result, target, mode, EXPAND_NORMAL);
4538 return NULL_RTX;
4541 /* Expand expression EXP, which is a call to the strcspn builtin.
4542 Return NULL_RTX if we failed; the caller should emit a normal call,
4543 otherwise try to get the result in TARGET, if convenient. */
4545 static rtx
4546 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4548 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4550 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4551 CALL_EXPR_ARG (exp, 1));
4552 if (result)
4553 return expand_expr (result, target, mode, EXPAND_NORMAL);
4555 return NULL_RTX;
4558 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4559 if that's convenient. */
4562 expand_builtin_saveregs (void)
4564 rtx val, seq;
4566 /* Don't do __builtin_saveregs more than once in a function.
4567 Save the result of the first call and reuse it. */
4568 if (saveregs_value != 0)
4569 return saveregs_value;
4571 /* When this function is called, it means that registers must be
4572 saved on entry to this function. So we migrate the call to the
4573 first insn of this function. */
4575 start_sequence ();
4577 /* Do whatever the machine needs done in this case. */
4578 val = targetm.calls.expand_builtin_saveregs ();
4580 seq = get_insns ();
4581 end_sequence ();
4583 saveregs_value = val;
4585 /* Put the insns after the NOTE that starts the function. If this
4586 is inside a start_sequence, make the outer-level insn chain current, so
4587 the code is placed at the start of the function. */
4588 push_topmost_sequence ();
4589 emit_insn_after (seq, entry_of_function ());
4590 pop_topmost_sequence ();
4592 return val;
4595 /* __builtin_args_info (N) returns word N of the arg space info
4596 for the current function. The number and meanings of words
4597 are controlled by the definition of CUMULATIVE_ARGS. */
4599 static rtx
4600 expand_builtin_args_info (tree exp)
4602 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4603 int *word_ptr = (int *) &crtl->args.info;
4605 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4607 if (call_expr_nargs (exp) != 0)
4609 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4610 error ("argument of %<__builtin_args_info%> must be constant");
4611 else
4613 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4615 if (wordnum < 0 || wordnum >= nwords)
4616 error ("argument of %<__builtin_args_info%> out of range");
4617 else
4618 return GEN_INT (word_ptr[wordnum]);
4621 else
4622 error ("missing argument in %<__builtin_args_info%>");
4624 return const0_rtx;
4627 /* Expand a call to __builtin_next_arg. */
4629 static rtx
4630 expand_builtin_next_arg (void)
4632 /* Checking arguments is already done in fold_builtin_next_arg,
4633 which must be called before this function. */
4634 return expand_binop (ptr_mode, add_optab,
4635 crtl->args.internal_arg_pointer,
4636 crtl->args.arg_offset_rtx,
4637 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4640 /* Make it easier for the backends by protecting the valist argument
4641 from multiple evaluations. */
4643 static tree
4644 stabilize_va_list (tree valist, int needs_lvalue)
4646 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4648 if (TREE_SIDE_EFFECTS (valist))
4649 valist = save_expr (valist);
4651 /* For this case, the backends will be expecting a pointer to
4652 TREE_TYPE (va_list_type_node), but it's possible we've
4653 actually been given an array (an actual va_list_type_node).
4654 So fix it. */
4655 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4657 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4658 valist = build_fold_addr_expr_with_type (valist, p1);
4661 else
4663 tree pt;
4665 if (! needs_lvalue)
4667 if (! TREE_SIDE_EFFECTS (valist))
4668 return valist;
4670 pt = build_pointer_type (va_list_type_node);
4671 valist = fold_build1 (ADDR_EXPR, pt, valist);
4672 TREE_SIDE_EFFECTS (valist) = 1;
4675 if (TREE_SIDE_EFFECTS (valist))
4676 valist = save_expr (valist);
4677 valist = build_fold_indirect_ref (valist);
4680 return valist;
4683 /* The "standard" definition of va_list is void*. */
4685 tree
4686 std_build_builtin_va_list (void)
4688 return ptr_type_node;
4691 /* The "standard" implementation of va_start: just assign `nextarg' to
4692 the variable. */
4694 void
4695 std_expand_builtin_va_start (tree valist, rtx nextarg)
4697 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4698 convert_move (va_r, nextarg, 0);
4701 /* Expand EXP, a call to __builtin_va_start. */
4703 static rtx
4704 expand_builtin_va_start (tree exp)
4706 rtx nextarg;
4707 tree valist;
4709 if (call_expr_nargs (exp) < 2)
4711 error ("too few arguments to function %<va_start%>");
4712 return const0_rtx;
4715 if (fold_builtin_next_arg (exp, true))
4716 return const0_rtx;
4718 nextarg = expand_builtin_next_arg ();
4719 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4721 if (targetm.expand_builtin_va_start)
4722 targetm.expand_builtin_va_start (valist, nextarg);
4723 else
4724 std_expand_builtin_va_start (valist, nextarg);
4726 return const0_rtx;
4729 /* The "standard" implementation of va_arg: read the value from the
4730 current (padded) address and increment by the (padded) size. */
4732 tree
4733 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4735 tree addr, t, type_size, rounded_size, valist_tmp;
4736 unsigned HOST_WIDE_INT align, boundary;
4737 bool indirect;
4739 #ifdef ARGS_GROW_DOWNWARD
4740 /* All of the alignment and movement below is for args-grow-up machines.
4741 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4742 implement their own specialized gimplify_va_arg_expr routines. */
4743 gcc_unreachable ();
4744 #endif
4746 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4747 if (indirect)
4748 type = build_pointer_type (type);
4750 align = PARM_BOUNDARY / BITS_PER_UNIT;
4751 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4753 /* Hoist the valist value into a temporary for the moment. */
4754 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4756 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4757 requires greater alignment, we must perform dynamic alignment. */
4758 if (boundary > align
4759 && !integer_zerop (TYPE_SIZE (type)))
4761 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4762 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4763 valist_tmp, size_int (boundary - 1)));
4764 gimplify_and_add (t, pre_p);
4766 t = fold_convert (sizetype, valist_tmp);
4767 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4768 fold_convert (TREE_TYPE (valist),
4769 fold_build2 (BIT_AND_EXPR, sizetype, t,
4770 size_int (-boundary))));
4771 gimplify_and_add (t, pre_p);
4773 else
4774 boundary = align;
4776 /* If the actual alignment is less than the alignment of the type,
4777 adjust the type accordingly so that we don't assume strict alignment
4778 when dereferencing the pointer. */
4779 boundary *= BITS_PER_UNIT;
4780 if (boundary < TYPE_ALIGN (type))
4782 type = build_variant_type_copy (type);
4783 TYPE_ALIGN (type) = boundary;
4786 /* Compute the rounded size of the type. */
4787 type_size = size_in_bytes (type);
4788 rounded_size = round_up (type_size, align);
4790 /* Reduce rounded_size so it's sharable with the postqueue. */
4791 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4793 /* Get AP. */
4794 addr = valist_tmp;
4795 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4797 /* Small args are padded downward. */
4798 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4799 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4800 size_binop (MINUS_EXPR, rounded_size, type_size));
4801 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4804 /* Compute new value for AP. */
4805 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4806 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4807 gimplify_and_add (t, pre_p);
4809 addr = fold_convert (build_pointer_type (type), addr);
4811 if (indirect)
4812 addr = build_va_arg_indirect_ref (addr);
4814 return build_va_arg_indirect_ref (addr);
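/* Illustrative sketch (assumes the plain pointer va_list, no dynamic
   over-alignment, no PAD_VARARGS_DOWN adjustment and no pass-by-reference):
   the pointer arithmetic the standard expansion above produces.  ROUND_UP
   and MY_VA_ARG are hypothetical names.  */
#if 0
#define ROUND_UP(n, a)  (((n) + (a) - 1) & ~((size_t) (a) - 1))

/* AP must be a char * lvalue; ALIGN is PARM_BOUNDARY / BITS_PER_UNIT.  */
#define MY_VA_ARG(ap, type, align)                                   \
  ((ap) += ROUND_UP (sizeof (type), (align)),                        \
   *(type *) ((ap) - ROUND_UP (sizeof (type), (align))))
#endif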
4817 /* Build an indirect-ref expression over the given TREE, which represents a
4818 piece of a va_arg() expansion. */
4819 tree
4820 build_va_arg_indirect_ref (tree addr)
4822 addr = build_fold_indirect_ref (addr);
4824 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4825 mf_mark (addr);
4827 return addr;
4830 /* Return a dummy expression of type TYPE in order to keep going after an
4831 error. */
4833 static tree
4834 dummy_object (tree type)
4836 tree t = build_int_cst (build_pointer_type (type), 0);
4837 return build1 (INDIRECT_REF, type, t);
4840 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4841 builtin function, but a very special sort of operator. */
4843 enum gimplify_status
4844 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4846 tree promoted_type, want_va_type, have_va_type;
4847 tree valist = TREE_OPERAND (*expr_p, 0);
4848 tree type = TREE_TYPE (*expr_p);
4849 tree t;
4851 /* Verify that valist is of the proper type. */
4852 want_va_type = va_list_type_node;
4853 have_va_type = TREE_TYPE (valist);
4855 if (have_va_type == error_mark_node)
4856 return GS_ERROR;
4858 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4860 /* If va_list is an array type, the argument may have decayed
4861 to a pointer type, e.g. by being passed to another function.
4862 In that case, unwrap both types so that we can compare the
4863 underlying records. */
4864 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4865 || POINTER_TYPE_P (have_va_type))
4867 want_va_type = TREE_TYPE (want_va_type);
4868 have_va_type = TREE_TYPE (have_va_type);
4872 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4874 error ("first argument to %<va_arg%> not of type %<va_list%>");
4875 return GS_ERROR;
4878 /* Generate a diagnostic for requesting data of a type that cannot
4879 be passed through `...' due to type promotion at the call site. */
4880 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4881 != type)
4883 static bool gave_help;
4885 /* Unfortunately, this is merely undefined, rather than a constraint
4886 violation, so we cannot make this an error. If this call is never
4887 executed, the program is still strictly conforming. */
4888 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4889 type, promoted_type);
4890 if (! gave_help)
4892 gave_help = true;
4893 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4894 promoted_type, type);
4897 /* We can, however, treat "undefined" any way we please.
4898 Call abort to encourage the user to fix the program. */
4899 inform ("if this code is reached, the program will abort");
4900 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4901 append_to_statement_list (t, pre_p);
4903 /* This is dead code, but go ahead and finish so that the
4904 mode of the result comes out right. */
4905 *expr_p = dummy_object (type);
4906 return GS_ALL_DONE;
4908 else
4910 /* Make it easier for the backends by protecting the valist argument
4911 from multiple evaluations. */
4912 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4914 /* For this case, the backends will be expecting a pointer to
4915 TREE_TYPE (va_list_type_node), but it's possible we've
4916 actually been given an array (an actual va_list_type_node).
4917 So fix it. */
4918 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4920 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4921 valist = build_fold_addr_expr_with_type (valist, p1);
4923 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4925 else
4926 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4928 if (!targetm.gimplify_va_arg_expr)
4929 /* FIXME: Once most targets are converted we should merely
4930 assert this is non-null. */
4931 return GS_ALL_DONE;
4933 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4934 return GS_OK;
4938 /* Expand EXP, a call to __builtin_va_end. */
4940 static rtx
4941 expand_builtin_va_end (tree exp)
4943 tree valist = CALL_EXPR_ARG (exp, 0);
4945 /* Evaluate for side effects, if needed. I hate macros that don't
4946 do that. */
4947 if (TREE_SIDE_EFFECTS (valist))
4948 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4950 return const0_rtx;
4953 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4954 builtin rather than just as an assignment in stdarg.h because of the
4955 nastiness of array-type va_list types. */
4957 static rtx
4958 expand_builtin_va_copy (tree exp)
4960 tree dst, src, t;
4962 dst = CALL_EXPR_ARG (exp, 0);
4963 src = CALL_EXPR_ARG (exp, 1);
4965 dst = stabilize_va_list (dst, 1);
4966 src = stabilize_va_list (src, 0);
4968 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4970 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4971 TREE_SIDE_EFFECTS (t) = 1;
4972 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4974 else
4976 rtx dstb, srcb, size;
4978 /* Evaluate to pointers. */
4979 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4980 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4981 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4982 VOIDmode, EXPAND_NORMAL);
4984 dstb = convert_memory_address (Pmode, dstb);
4985 srcb = convert_memory_address (Pmode, srcb);
4987 /* "Dereference" to BLKmode memories. */
4988 dstb = gen_rtx_MEM (BLKmode, dstb);
4989 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4990 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4991 srcb = gen_rtx_MEM (BLKmode, srcb);
4992 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4993 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4995 /* Copy. */
4996 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4999 return const0_rtx;
5002 /* Expand a call to one of the builtin functions __builtin_frame_address or
5003 __builtin_return_address. */
5005 static rtx
5006 expand_builtin_frame_address (tree fndecl, tree exp)
5008 /* The argument must be a nonnegative integer constant.
5009 It counts the number of frames to scan up the stack.
5010 The value is the return address saved in that frame. */
5011 if (call_expr_nargs (exp) == 0)
5012 /* Warning about missing arg was already issued. */
5013 return const0_rtx;
5014 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5016 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5017 error ("invalid argument to %<__builtin_frame_address%>");
5018 else
5019 error ("invalid argument to %<__builtin_return_address%>");
5020 return const0_rtx;
5022 else
5024 rtx tem
5025 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5026 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5028 /* Some ports cannot access arbitrary stack frames. */
5029 if (tem == NULL)
5031 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5032 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5033 else
5034 warning (0, "unsupported argument to %<__builtin_return_address%>");
5035 return const0_rtx;
5038 /* For __builtin_frame_address, return what we've got. */
5039 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5040 return tem;
5042 if (!REG_P (tem)
5043 && ! CONSTANT_P (tem))
5044 tem = copy_to_mode_reg (Pmode, tem);
5045 return tem;
5049 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5050 we failed; the caller should emit a normal call, otherwise try to get
5051 the result in TARGET, if convenient. */
5053 static rtx
5054 expand_builtin_alloca (tree exp, rtx target)
5056 rtx op0;
5057 rtx result;
5059 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5060 should always expand to function calls. These can be intercepted
5061 in libmudflap. */
5062 if (flag_mudflap)
5063 return NULL_RTX;
5065 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5066 return NULL_RTX;
5068 /* Compute the argument. */
5069 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5071 /* Allocate the desired space. */
5072 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5073 result = convert_memory_address (ptr_mode, result);
5075 return result;
5078 /* Expand EXP, a call to a bswap builtin. Return NULL_RTX if a normal call
5079 should be emitted; otherwise place the result in TARGET if convenient. */
5081 static rtx
5082 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5084 enum machine_mode mode;
5085 tree arg;
5086 rtx op0;
5088 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5089 return NULL_RTX;
5091 arg = CALL_EXPR_ARG (exp, 0);
5092 mode = TYPE_MODE (TREE_TYPE (arg));
5093 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5095 target = expand_unop (mode, bswap_optab, op0, target, 1);
5097 gcc_assert (target);
5099 return convert_to_mode (mode, target, 0);
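/* Illustrative sketch (32-bit case, hypothetical value): the value-level
   effect of the byte-swap expansion above.  */
#if 0
unsigned int swapped = __builtin_bswap32 (0x11223344);  /* 0x44332211 */
#endif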
5102 /* Expand a call to a unary builtin in EXP.
5103 Return NULL_RTX if a normal call should be emitted rather than expanding the
5104 function in-line. If convenient, the result should be placed in TARGET.
5105 SUBTARGET may be used as the target for computing one of EXP's operands. */
5107 static rtx
5108 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5109 rtx subtarget, optab op_optab)
5111 rtx op0;
5113 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5114 return NULL_RTX;
5116 /* Compute the argument. */
5117 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5118 VOIDmode, EXPAND_NORMAL);
5119 /* Compute op, into TARGET if possible.
5120 Set TARGET to wherever the result comes back. */
5121 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5122 op_optab, op0, target, 1);
5123 gcc_assert (target);
5125 return convert_to_mode (target_mode, target, 0);
5128 /* If the string passed to fputs is a constant and is one character
5129 long, we attempt to transform this call into __builtin_fputc(). */
5131 static rtx
5132 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5134 /* Verify the arguments in the original call. */
5135 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5137 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5138 CALL_EXPR_ARG (exp, 1),
5139 (target == const0_rtx),
5140 unlocked, NULL_TREE);
5141 if (result)
5142 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5144 return NULL_RTX;
5147 /* Expand a call to __builtin_expect. We just return our argument
5148 as the builtin_expect semantics should already have been handled by the
5149 tree branch prediction pass. */
5151 static rtx
5152 expand_builtin_expect (tree exp, rtx target)
5154 tree arg, c;
5156 if (call_expr_nargs (exp) < 2)
5157 return const0_rtx;
5158 arg = CALL_EXPR_ARG (exp, 0);
5159 c = CALL_EXPR_ARG (exp, 1);
5161 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5162 /* When guessing was done, the hints should be already stripped away. */
5163 gcc_assert (!flag_guess_branch_prob
5164 || optimize == 0 || errorcount || sorrycount);
5165 return target;
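/* Illustrative sketch (hypothetical condition and handler): typical use of
   the builtin expanded above.  By this point the probability hint has
   already been consumed by the tree-level branch predictor, so only ARG
   remains to be expanded.  */
#if 0
if (__builtin_expect (err != 0, 0))   /* "err != 0 is unlikely" */
  handle_error (err);
#endif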
5168 void
5169 expand_builtin_trap (void)
5171 #ifdef HAVE_trap
5172 if (HAVE_trap)
5173 emit_insn (gen_trap ());
5174 else
5175 #endif
5176 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5177 emit_barrier ();
5180 /* Expand EXP, a call to fabs, fabsf or fabsl.
5181 Return NULL_RTX if a normal call should be emitted rather than expanding
5182 the function inline. If convenient, the result should be placed
5183 in TARGET. SUBTARGET may be used as the target for computing
5184 the operand. */
5186 static rtx
5187 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5189 enum machine_mode mode;
5190 tree arg;
5191 rtx op0;
5193 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5194 return NULL_RTX;
5196 arg = CALL_EXPR_ARG (exp, 0);
5197 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5198 mode = TYPE_MODE (TREE_TYPE (arg));
5199 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5200 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5203 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5204 Return NULL_RTX if a normal call should be emitted rather than expanding the
5205 function inline. If convenient, the result should be placed in TARGET.
5206 SUBTARGET may be used as the target for computing the operand. */
5208 static rtx
5209 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5211 rtx op0, op1;
5212 tree arg;
5214 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5215 return NULL_RTX;
5217 arg = CALL_EXPR_ARG (exp, 0);
5218 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5220 arg = CALL_EXPR_ARG (exp, 1);
5221 op1 = expand_normal (arg);
5223 return expand_copysign (op0, op1, target);
5226 /* Create a new constant string literal and return a char* pointer to it.
5227 The STRING_CST value is the LEN characters at STR. */
5228 tree
5229 build_string_literal (int len, const char *str)
5231 tree t, elem, index, type;
5233 t = build_string (len, str);
5234 elem = build_type_variant (char_type_node, 1, 0);
5235 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5236 type = build_array_type (elem, index);
5237 TREE_TYPE (t) = type;
5238 TREE_CONSTANT (t) = 1;
5239 TREE_READONLY (t) = 1;
5240 TREE_STATIC (t) = 1;
5242 type = build_pointer_type (type);
5243 t = build1 (ADDR_EXPR, type, t);
5245 type = build_pointer_type (elem);
5246 t = build1 (NOP_EXPR, type, t);
5247 return t;
5250 /* Expand EXP, a call to printf or printf_unlocked.
5251 Return NULL_RTX if a normal call should be emitted rather than transforming
5252 the function inline. If convenient, the result should be placed in
5253 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5254 call. */
5255 static rtx
5256 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5257 bool unlocked)
5259 /* If we're using an unlocked function, assume the other unlocked
5260 functions exist explicitly. */
5261 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5262 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5263 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5264 : implicit_built_in_decls[BUILT_IN_PUTS];
5265 const char *fmt_str;
5266 tree fn = 0;
5267 tree fmt, arg;
5268 int nargs = call_expr_nargs (exp);
5270 /* If the return value is used, don't do the transformation. */
5271 if (target != const0_rtx)
5272 return NULL_RTX;
5274 /* Verify the required arguments in the original call. */
5275 if (nargs == 0)
5276 return NULL_RTX;
5277 fmt = CALL_EXPR_ARG (exp, 0);
5278 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5279 return NULL_RTX;
5281 /* Check whether the format is a literal string constant. */
5282 fmt_str = c_getstr (fmt);
5283 if (fmt_str == NULL)
5284 return NULL_RTX;
5286 if (!init_target_chars ())
5287 return NULL_RTX;
5289 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5290 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5292 if ((nargs != 2)
5293 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5294 return NULL_RTX;
5295 if (fn_puts)
5296 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5298 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5299 else if (strcmp (fmt_str, target_percent_c) == 0)
5301 if ((nargs != 2)
5302 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5303 return NULL_RTX;
5304 if (fn_putchar)
5305 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5307 else
5309 /* We can't handle anything else with % args or %% ... yet. */
5310 if (strchr (fmt_str, target_percent))
5311 return NULL_RTX;
5313 if (nargs > 1)
5314 return NULL_RTX;
5316 /* If the format specifier was "", printf does nothing. */
5317 if (fmt_str[0] == '\0')
5318 return const0_rtx;
5319 /* If the format specifier has length of 1, call putchar. */
5320 if (fmt_str[1] == '\0')
5322 /* Given printf("c") (where c is any one character),
5323 convert "c"[0] to an int and pass that to the replacement
5324 function. */
5325 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5326 if (fn_putchar)
5327 fn = build_call_expr (fn_putchar, 1, arg);
5329 else
5331 /* If the format specifier was "string\n", call puts("string"). */
5332 size_t len = strlen (fmt_str);
5333 if ((unsigned char)fmt_str[len - 1] == target_newline)
5335 /* Create a NUL-terminated string that's one char shorter
5336 than the original, stripping off the trailing '\n'. */
5337 char *newstr = alloca (len);
5338 memcpy (newstr, fmt_str, len - 1);
5339 newstr[len - 1] = 0;
5340 arg = build_string_literal (len, newstr);
5341 if (fn_puts)
5342 fn = build_call_expr (fn_puts, 1, arg);
5344 else
5345 /* We'd like to arrange to call fputs(string,stdout) here,
5346 but we need stdout and don't have a way to get it yet. */
5347 return NULL_RTX;
5351 if (!fn)
5352 return NULL_RTX;
5353 if (TREE_CODE (fn) == CALL_EXPR)
5354 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5355 return expand_expr (fn, target, mode, EXPAND_NORMAL);
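/* Illustrative sketch, not part of the compiler source (kept under #if 0):
   the source-level rewrites the expander above performs when the format is
   a string literal and the printf return value is unused (assumes
   <stdio.h>).  */
#if 0
void
printf_fold_example (const char *s, int c)
{
  printf ("%s\n", s);	/* becomes  puts (s);		*/
  printf ("%c", c);	/* becomes  putchar (c);	*/
  printf ("x");		/* becomes  putchar ('x');	*/
  printf ("hello\n");	/* becomes  puts ("hello");	*/
  printf ("");		/* expands to nothing		*/
}
#endif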
5358 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5359 Return NULL_RTX if a normal call should be emitted rather than transforming
5360 the function inline. If convenient, the result should be placed in
5361 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5362 call. */
5363 static rtx
5364 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5365 bool unlocked)
5367 /* If we're using an unlocked function, assume the other unlocked
5368 functions exist explicitly. */
5369 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5370 : implicit_built_in_decls[BUILT_IN_FPUTC];
5371 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5372 : implicit_built_in_decls[BUILT_IN_FPUTS];
5373 const char *fmt_str;
5374 tree fn = 0;
5375 tree fmt, fp, arg;
5376 int nargs = call_expr_nargs (exp);
5378 /* If the return value is used, don't do the transformation. */
5379 if (target != const0_rtx)
5380 return NULL_RTX;
5382 /* Verify the required arguments in the original call. */
5383 if (nargs < 2)
5384 return NULL_RTX;
5385 fp = CALL_EXPR_ARG (exp, 0);
5386 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5387 return NULL_RTX;
5388 fmt = CALL_EXPR_ARG (exp, 1);
5389 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5390 return NULL_RTX;
5392 /* Check whether the format is a literal string constant. */
5393 fmt_str = c_getstr (fmt);
5394 if (fmt_str == NULL)
5395 return NULL_RTX;
5397 if (!init_target_chars ())
5398 return NULL_RTX;
5400 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5401 if (strcmp (fmt_str, target_percent_s) == 0)
5403 if ((nargs != 3)
5404 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5405 return NULL_RTX;
5406 arg = CALL_EXPR_ARG (exp, 2);
5407 if (fn_fputs)
5408 fn = build_call_expr (fn_fputs, 2, arg, fp);
5410 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5411 else if (strcmp (fmt_str, target_percent_c) == 0)
5413 if ((nargs != 3)
5414 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5415 return NULL_RTX;
5416 arg = CALL_EXPR_ARG (exp, 2);
5417 if (fn_fputc)
5418 fn = build_call_expr (fn_fputc, 2, arg, fp);
5420 else
5422 /* We can't handle anything else with % args or %% ... yet. */
5423 if (strchr (fmt_str, target_percent))
5424 return NULL_RTX;
5426 if (nargs > 2)
5427 return NULL_RTX;
5429 /* If the format specifier was "", fprintf does nothing. */
5430 if (fmt_str[0] == '\0')
5432 /* Evaluate and ignore FILE* argument for side-effects. */
5433 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5434 return const0_rtx;
5437 /* When "string" doesn't contain %, replace all cases of
5438 fprintf(stream,string) with fputs(string,stream). The fputs
5439 builtin will take care of special cases like length == 1. */
5440 if (fn_fputs)
5441 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5444 if (!fn)
5445 return NULL_RTX;
5446 if (TREE_CODE (fn) == CALL_EXPR)
5447 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5448 return expand_expr (fn, target, mode, EXPAND_NORMAL);
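/* Illustrative sketch, not part of the compiler source (kept under #if 0):
   the analogous rewrites for fprintf when the format is a string literal
   and the result is ignored (assumes <stdio.h>).  */
#if 0
void
fprintf_fold_example (FILE *fp, const char *s, int c)
{
  fprintf (fp, "%s", s);	/* becomes  fputs (s, fp);		*/
  fprintf (fp, "%c", c);	/* becomes  fputc (c, fp);		*/
  fprintf (fp, "constant");	/* becomes  fputs ("constant", fp);	*/
  fprintf (fp, "");		/* only FP is evaluated, for side effects */
}
#endif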
5451 /* Expand a call EXP to sprintf. Return NULL_RTX if
5452 a normal call should be emitted rather than expanding the function
5453 inline. If convenient, the result should be placed in TARGET with
5454 mode MODE. */
5456 static rtx
5457 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5459 tree dest, fmt;
5460 const char *fmt_str;
5461 int nargs = call_expr_nargs (exp);
5463 /* Verify the required arguments in the original call. */
5464 if (nargs < 2)
5465 return NULL_RTX;
5466 dest = CALL_EXPR_ARG (exp, 0);
5467 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5468 return NULL_RTX;
5469 fmt = CALL_EXPR_ARG (exp, 1);
5470 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5471 return NULL_RTX;
5473 /* Check whether the format is a literal string constant. */
5474 fmt_str = c_getstr (fmt);
5475 if (fmt_str == NULL)
5476 return NULL_RTX;
5478 if (!init_target_chars ())
5479 return NULL_RTX;
5481 /* If the format doesn't contain % args or %%, use strcpy. */
5482 if (strchr (fmt_str, target_percent) == 0)
5484 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5485 tree exp;
5487 if ((nargs > 2) || ! fn)
5488 return NULL_RTX;
5489 expand_expr (build_call_expr (fn, 2, dest, fmt),
5490 const0_rtx, VOIDmode, EXPAND_NORMAL);
5491 if (target == const0_rtx)
5492 return const0_rtx;
5493 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5494 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5496 /* If the format is "%s", use strcpy; if the result is used, it must be the known constant length of the argument. */
5497 else if (strcmp (fmt_str, target_percent_s) == 0)
5499 tree fn, arg, len;
5500 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5502 if (! fn)
5503 return NULL_RTX;
5504 if (nargs != 3)
5505 return NULL_RTX;
5506 arg = CALL_EXPR_ARG (exp, 2);
5507 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5508 return NULL_RTX;
5510 if (target != const0_rtx)
5512 len = c_strlen (arg, 1);
5513 if (! len || TREE_CODE (len) != INTEGER_CST)
5514 return NULL_RTX;
5516 else
5517 len = NULL_TREE;
5519 expand_expr (build_call_expr (fn, 2, dest, arg),
5520 const0_rtx, VOIDmode, EXPAND_NORMAL);
5522 if (target == const0_rtx)
5523 return const0_rtx;
5524 return expand_expr (len, target, mode, EXPAND_NORMAL);
5527 return NULL_RTX;
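/* Illustrative sketch, not part of the compiler source (kept under #if 0):
   the rewrites the expander above performs (assumes <stdio.h> and
   <string.h>).  */
#if 0
void
sprintf_fold_example (char *dest, const char *src)
{
  sprintf (dest, "abc");	/* becomes  strcpy (dest, "abc"); the value,
				   if used, is the constant 3		*/
  sprintf (dest, "%s", src);	/* becomes  strcpy (dest, src); the value,
				   if used, requires strlen (src) to be a
				   compile-time constant		*/
}
#endif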
5530 /* Expand a call to either the entry or exit function profiler. */
5532 static rtx
5533 expand_builtin_profile_func (bool exitp)
5535 rtx this, which;
5537 this = DECL_RTL (current_function_decl);
5538 gcc_assert (MEM_P (this));
5539 this = XEXP (this, 0);
5541 if (exitp)
5542 which = profile_function_exit_libfunc;
5543 else
5544 which = profile_function_entry_libfunc;
5546 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5547 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5549 Pmode);
5551 return const0_rtx;
5554 /* Expand a call to __builtin___clear_cache. */
5556 static rtx
5557 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5559 #ifndef HAVE_clear_cache
5560 #ifdef CLEAR_INSN_CACHE
5561 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5562 does something. Just do the default expansion to a call to
5563 __clear_cache(). */
5564 return NULL_RTX;
5565 #else
5566 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5567 does nothing. There is no need to call it. Do nothing. */
5568 return const0_rtx;
5569 #endif /* CLEAR_INSN_CACHE */
5570 #else
5571 /* We have a "clear_cache" insn, and it will handle everything. */
5572 tree begin, end;
5573 rtx begin_rtx, end_rtx;
5574 enum insn_code icode;
5576 /* We must not expand to a library call. If we did, any
5577 fallback library function in libgcc that might contain a call to
5578 __builtin___clear_cache() would recurse infinitely. */
5579 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5581 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5582 return const0_rtx;
5585 if (HAVE_clear_cache)
5587 icode = CODE_FOR_clear_cache;
5589 begin = CALL_EXPR_ARG (exp, 0);
5590 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5591 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5592 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5593 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5595 end = CALL_EXPR_ARG (exp, 1);
5596 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5597 end_rtx = convert_memory_address (Pmode, end_rtx);
5598 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5599 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5601 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5603 return const0_rtx;
5604 #endif /* HAVE_clear_cache */
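/* Illustrative sketch, not part of the compiler source (kept under #if 0):
   the typical JIT-style use this builtin supports -- emit instructions
   into a buffer, then flush that range before executing it.  Both
   arguments must be pointers, as the check above enforces.  */
#if 0
void
clear_cache_usage_example (char *buf, unsigned long len)
{
  /* ... write machine code into BUF ...  */
  __builtin___clear_cache (buf, buf + len);
}
#endif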
5607 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5609 static rtx
5610 round_trampoline_addr (rtx tramp)
5612 rtx temp, addend, mask;
5614 /* If we don't need too much alignment, we'll have been guaranteed
5615 proper alignment by get_trampoline_type. */
5616 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5617 return tramp;
5619 /* Round address up to desired boundary. */
5620 temp = gen_reg_rtx (Pmode);
5621 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5622 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5624 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5625 temp, 0, OPTAB_LIB_WIDEN);
5626 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5627 temp, 0, OPTAB_LIB_WIDEN);
5629 return tramp;
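/* Illustrative sketch, not part of the compiler source (kept under #if 0):
   the RTL emitted above is the usual round-up-to-alignment idiom, i.e. for
   an alignment A in bytes (a power of two):  (addr + A - 1) & -A.  */
#if 0
unsigned long
round_up_example (unsigned long addr, unsigned long align)
{
  return (addr + align - 1) & -align;
}
#endif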
5632 static rtx
5633 expand_builtin_init_trampoline (tree exp)
5635 tree t_tramp, t_func, t_chain;
5636 rtx r_tramp, r_func, r_chain;
5637 #ifdef TRAMPOLINE_TEMPLATE
5638 rtx blktramp;
5639 #endif
5641 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5642 POINTER_TYPE, VOID_TYPE))
5643 return NULL_RTX;
5645 t_tramp = CALL_EXPR_ARG (exp, 0);
5646 t_func = CALL_EXPR_ARG (exp, 1);
5647 t_chain = CALL_EXPR_ARG (exp, 2);
5649 r_tramp = expand_normal (t_tramp);
5650 r_func = expand_normal (t_func);
5651 r_chain = expand_normal (t_chain);
5653 /* Generate insns to initialize the trampoline. */
5654 r_tramp = round_trampoline_addr (r_tramp);
5655 #ifdef TRAMPOLINE_TEMPLATE
5656 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5657 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5658 emit_block_move (blktramp, assemble_trampoline_template (),
5659 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5660 #endif
5661 trampolines_created = 1;
5662 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5664 return const0_rtx;
5667 static rtx
5668 expand_builtin_adjust_trampoline (tree exp)
5670 rtx tramp;
5672 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5673 return NULL_RTX;
5675 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5676 tramp = round_trampoline_addr (tramp);
5677 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5678 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5679 #endif
5681 return tramp;
5684 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5685 function. The function first checks whether the back end provides
5686 an insn to implement signbit for the respective mode. If not, it
5687 checks whether the floating point format of the value is such that
5688 the sign bit can be extracted. If that is not the case, the
5689 function returns NULL_RTX to indicate that a normal call should be
5690 emitted rather than expanding the function in-line. EXP is the
5691 expression that is a call to the builtin function; if convenient,
5692 the result should be placed in TARGET. */
5693 static rtx
5694 expand_builtin_signbit (tree exp, rtx target)
5696 const struct real_format *fmt;
5697 enum machine_mode fmode, imode, rmode;
5698 HOST_WIDE_INT hi, lo;
5699 tree arg;
5700 int word, bitpos;
5701 enum insn_code icode;
5702 rtx temp;
5704 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5705 return NULL_RTX;
5707 arg = CALL_EXPR_ARG (exp, 0);
5708 fmode = TYPE_MODE (TREE_TYPE (arg));
5709 rmode = TYPE_MODE (TREE_TYPE (exp));
5710 fmt = REAL_MODE_FORMAT (fmode);
5712 arg = builtin_save_expr (arg);
5714 /* Expand the argument yielding an RTX expression. */
5715 temp = expand_normal (arg);
5717 /* Check if the back end provides an insn that handles signbit for the
5718 argument's mode. */
5719 icode = signbit_optab->handlers [(int) fmode].insn_code;
5720 if (icode != CODE_FOR_nothing)
5722 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5723 emit_unop_insn (icode, target, temp, UNKNOWN);
5724 return target;
5727 /* For floating point formats without a sign bit, implement signbit
5728 as "ARG < 0.0". */
5729 bitpos = fmt->signbit_ro;
5730 if (bitpos < 0)
5732 /* But we can't do this if the format supports signed zero. */
5733 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5734 return NULL_RTX;
5736 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5737 build_real (TREE_TYPE (arg), dconst0));
5738 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5741 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5743 imode = int_mode_for_mode (fmode);
5744 if (imode == BLKmode)
5745 return NULL_RTX;
5746 temp = gen_lowpart (imode, temp);
5748 else
5750 imode = word_mode;
5751 /* Handle targets with different FP word orders. */
5752 if (FLOAT_WORDS_BIG_ENDIAN)
5753 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5754 else
5755 word = bitpos / BITS_PER_WORD;
5756 temp = operand_subword_force (temp, word, fmode);
5757 bitpos = bitpos % BITS_PER_WORD;
5760 /* Force the intermediate word_mode (or narrower) result into a
5761 register. This avoids attempting to create paradoxical SUBREGs
5762 of floating point modes below. */
5763 temp = force_reg (imode, temp);
5765 /* If the bitpos is within the "result mode" lowpart, the operation
5766 can be implemented with a single bitwise AND. Otherwise, we need
5767 a right shift and an AND. */
5769 if (bitpos < GET_MODE_BITSIZE (rmode))
5771 if (bitpos < HOST_BITS_PER_WIDE_INT)
5773 hi = 0;
5774 lo = (HOST_WIDE_INT) 1 << bitpos;
5776 else
5778 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5779 lo = 0;
5782 if (imode != rmode)
5783 temp = gen_lowpart (rmode, temp);
5784 temp = expand_binop (rmode, and_optab, temp,
5785 immed_double_const (lo, hi, rmode),
5786 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5788 else
5790 /* Perform a logical right shift to place the signbit in the least
5791 significant bit, then truncate the result to the desired mode
5792 and mask just this bit. */
5793 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5794 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5795 temp = gen_lowpart (rmode, temp);
5796 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5797 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5800 return temp;
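/* Illustrative sketch, not part of the compiler source (kept under #if 0):
   for an IEEE binary64 double the lowpart/shift/AND sequence above amounts
   to isolating bit 63 of the value's representation, roughly:  */
#if 0
int
signbit_bits_example (double x)
{
  unsigned long long bits;
  __builtin_memcpy (&bits, &x, sizeof bits);
  return (bits >> 63) & 1;
}
#endif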
5803 /* Expand fork or exec calls. TARGET is the desired target of the
5804 call. EXP is the call. FN is the
5805 declaration of the actual function. IGNORE is nonzero if the
5806 value is to be ignored. */
5808 static rtx
5809 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5811 tree id, decl;
5812 tree call;
5814 /* If we are not profiling, just call the function. */
5815 if (!profile_arc_flag)
5816 return NULL_RTX;
5818 /* Otherwise call the wrapper. This should be equivalent for the rest of
5819 the compiler, so the code does not diverge, and the wrapper may run the
5820 code necessary for keeping the profiling sane. */
5822 switch (DECL_FUNCTION_CODE (fn))
5824 case BUILT_IN_FORK:
5825 id = get_identifier ("__gcov_fork");
5826 break;
5828 case BUILT_IN_EXECL:
5829 id = get_identifier ("__gcov_execl");
5830 break;
5832 case BUILT_IN_EXECV:
5833 id = get_identifier ("__gcov_execv");
5834 break;
5836 case BUILT_IN_EXECLP:
5837 id = get_identifier ("__gcov_execlp");
5838 break;
5840 case BUILT_IN_EXECLE:
5841 id = get_identifier ("__gcov_execle");
5842 break;
5844 case BUILT_IN_EXECVP:
5845 id = get_identifier ("__gcov_execvp");
5846 break;
5848 case BUILT_IN_EXECVE:
5849 id = get_identifier ("__gcov_execve");
5850 break;
5852 default:
5853 gcc_unreachable ();
5856 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5857 DECL_EXTERNAL (decl) = 1;
5858 TREE_PUBLIC (decl) = 1;
5859 DECL_ARTIFICIAL (decl) = 1;
5860 TREE_NOTHROW (decl) = 1;
5861 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5862 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5863 call = rewrite_call_expr (exp, 0, decl, 0);
5864 return expand_call (call, target, ignore);
5869 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5870 the pointer in these functions is void*, the tree optimizers may remove
5871 casts. The mode computed in expand_builtin isn't reliable either, due
5872 to __sync_bool_compare_and_swap.
5874 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5875 group of builtins. This gives us log2 of the mode size. */
5877 static inline enum machine_mode
5878 get_builtin_sync_mode (int fcode_diff)
5880 /* The size is not negotiable, so ask not to get BLKmode in return
5881 if the target indicates that a smaller size would be better. */
5882 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
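/* Worked example (editorial note): the FOO_1 ... FOO_16 builtins are
   declared consecutively, so for __sync_fetch_and_add_4 the difference
   BUILT_IN_FETCH_AND_ADD_4 - BUILT_IN_FETCH_AND_ADD_1 is 2, giving
   BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on typical targets.  */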
5885 /* Expand the memory expression LOC and return the appropriate memory operand
5886 for the builtin_sync operations. */
5888 static rtx
5889 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5891 rtx addr, mem;
5893 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5895 /* Note that we explicitly do not want any alias information for this
5896 memory, so that we kill all other live memories. Otherwise we don't
5897 satisfy the full barrier semantics of the intrinsic. */
5898 mem = validize_mem (gen_rtx_MEM (mode, addr));
5900 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5901 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5902 MEM_VOLATILE_P (mem) = 1;
5904 return mem;
5907 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5908 EXP is the CALL_EXPR. CODE is the rtx code
5909 that corresponds to the arithmetic or logical operation from the name;
5910 an exception here is that NOT actually means NAND. TARGET is an optional
5911 place for us to store the results; AFTER is true if this is the
5912 fetch_and_xxx form. IGNORE is true if we don't actually care about
5913 the result of the operation at all. */
5915 static rtx
5916 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5917 enum rtx_code code, bool after,
5918 rtx target, bool ignore)
5920 rtx val, mem;
5921 enum machine_mode old_mode;
5923 /* Expand the operands. */
5924 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5926 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5927 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5928 of CONST_INTs, where we know the old_mode only from the call argument. */
5929 old_mode = GET_MODE (val);
5930 if (old_mode == VOIDmode)
5931 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5932 val = convert_modes (mode, old_mode, val, 1);
5934 if (ignore)
5935 return expand_sync_operation (mem, val, code);
5936 else
5937 return expand_sync_fetch_operation (mem, val, code, after, target);
5940 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5941 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5942 true if this is the boolean form. TARGET is a place for us to store the
5943 results; this is NOT optional if IS_BOOL is true. */
5945 static rtx
5946 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5947 bool is_bool, rtx target)
5949 rtx old_val, new_val, mem;
5950 enum machine_mode old_mode;
5952 /* Expand the operands. */
5953 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5956 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5957 mode, EXPAND_NORMAL);
5958 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5959 of CONST_INTs, where we know the old_mode only from the call argument. */
5960 old_mode = GET_MODE (old_val);
5961 if (old_mode == VOIDmode)
5962 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5963 old_val = convert_modes (mode, old_mode, old_val, 1);
5965 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5966 mode, EXPAND_NORMAL);
5967 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5968 of CONST_INTs, where we know the old_mode only from the call argument. */
5969 old_mode = GET_MODE (new_val);
5970 if (old_mode == VOIDmode)
5971 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5972 new_val = convert_modes (mode, old_mode, new_val, 1);
5974 if (is_bool)
5975 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5976 else
5977 return expand_val_compare_and_swap (mem, old_val, new_val, target);
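/* Illustrative sketch, not part of the compiler source (kept under #if 0):
   the two user-level forms handled here.  The _val form returns the prior
   contents of *P; the _bool form returns whether the store happened, which
   is equivalent to comparing that prior value with EXPECTED.  */
#if 0
int
cas_usage_example (int *p, int expected, int desired)
{
  int old = __sync_val_compare_and_swap (p, expected, desired);
  return old == expected;	/* same result as the _bool form */
}
#endif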
5980 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5981 general form is actually an atomic exchange, and some targets only
5982 support a reduced form with the second argument being a constant 1.
5983 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5984 the results. */
5986 static rtx
5987 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5988 rtx target)
5990 rtx val, mem;
5991 enum machine_mode old_mode;
5993 /* Expand the operands. */
5994 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5995 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5996 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5997 of CONST_INTs, where we know the old_mode only from the call argument. */
5998 old_mode = GET_MODE (val);
5999 if (old_mode == VOIDmode)
6000 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6001 val = convert_modes (mode, old_mode, val, 1);
6003 return expand_sync_lock_test_and_set (mem, val, target);
6006 /* Expand the __sync_synchronize intrinsic. */
6008 static void
6009 expand_builtin_synchronize (void)
6011 tree x;
6013 #ifdef HAVE_memory_barrier
6014 if (HAVE_memory_barrier)
6016 emit_insn (gen_memory_barrier ());
6017 return;
6019 #endif
6021 /* If no explicit memory barrier instruction is available, create an
6022 empty asm stmt with a memory clobber. */
6023 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6024 tree_cons (NULL, build_string (6, "memory"), NULL));
6025 ASM_VOLATILE_P (x) = 1;
6026 expand_asm_expr (x);
6029 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6031 static void
6032 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6034 enum insn_code icode;
6035 rtx mem, insn;
6036 rtx val = const0_rtx;
6038 /* Expand the operands. */
6039 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6041 /* If there is an explicit operation in the md file, use it. */
6042 icode = sync_lock_release[mode];
6043 if (icode != CODE_FOR_nothing)
6045 if (!insn_data[icode].operand[1].predicate (val, mode))
6046 val = force_reg (mode, val);
6048 insn = GEN_FCN (icode) (mem, val);
6049 if (insn)
6051 emit_insn (insn);
6052 return;
6056 /* Otherwise we can implement this operation by emitting a barrier
6057 followed by a store of zero. */
6058 expand_builtin_synchronize ();
6059 emit_move_insn (mem, val);
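/* Illustrative sketch, not part of the compiler source (kept under #if 0):
   the canonical lock built from these primitives.  __sync_lock_test_and_set
   is an acquire-barrier atomic exchange (some targets only support storing
   the constant 1); __sync_lock_release is the matching release-barrier
   store of zero, which the fallback above emulates with a full barrier
   followed by a plain store.  */
#if 0
static int example_lock;

void
spin_lock_example (void)
{
  while (__sync_lock_test_and_set (&example_lock, 1))
    ;	/* spin until the previous value was 0 */
}

void
spin_unlock_example (void)
{
  __sync_lock_release (&example_lock);
}
#endif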
6062 /* Expand an expression EXP that calls a built-in function,
6063 with result going to TARGET if that's convenient
6064 (and in mode MODE if that's convenient).
6065 SUBTARGET may be used as the target for computing one of EXP's operands.
6066 IGNORE is nonzero if the value is to be ignored. */
6069 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6070 int ignore)
6072 tree fndecl = get_callee_fndecl (exp);
6073 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6074 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6076 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6077 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6079 /* When not optimizing, generate calls to library functions for a certain
6080 set of builtins. */
6081 if (!optimize
6082 && !called_as_built_in (fndecl)
6083 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6084 && fcode != BUILT_IN_ALLOCA)
6085 return expand_call (exp, target, ignore);
6087 /* The built-in function expanders test for target == const0_rtx
6088 to determine whether the function's result will be ignored. */
6089 if (ignore)
6090 target = const0_rtx;
6092 /* If the result of a pure or const built-in function is ignored, and
6093 none of its arguments are volatile, we can avoid expanding the
6094 built-in call and just evaluate the arguments for side-effects. */
6095 if (target == const0_rtx
6096 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
6098 bool volatilep = false;
6099 tree arg;
6100 call_expr_arg_iterator iter;
6102 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6103 if (TREE_THIS_VOLATILE (arg))
6105 volatilep = true;
6106 break;
6109 if (! volatilep)
6111 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6112 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6113 return const0_rtx;
6117 switch (fcode)
6119 CASE_FLT_FN (BUILT_IN_FABS):
6120 target = expand_builtin_fabs (exp, target, subtarget);
6121 if (target)
6122 return target;
6123 break;
6125 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6126 target = expand_builtin_copysign (exp, target, subtarget);
6127 if (target)
6128 return target;
6129 break;
6131 /* Just do a normal library call if we were unable to fold
6132 the values. */
6133 CASE_FLT_FN (BUILT_IN_CABS):
6134 break;
6136 CASE_FLT_FN (BUILT_IN_EXP):
6137 CASE_FLT_FN (BUILT_IN_EXP10):
6138 CASE_FLT_FN (BUILT_IN_POW10):
6139 CASE_FLT_FN (BUILT_IN_EXP2):
6140 CASE_FLT_FN (BUILT_IN_EXPM1):
6141 CASE_FLT_FN (BUILT_IN_LOGB):
6142 CASE_FLT_FN (BUILT_IN_LOG):
6143 CASE_FLT_FN (BUILT_IN_LOG10):
6144 CASE_FLT_FN (BUILT_IN_LOG2):
6145 CASE_FLT_FN (BUILT_IN_LOG1P):
6146 CASE_FLT_FN (BUILT_IN_TAN):
6147 CASE_FLT_FN (BUILT_IN_ASIN):
6148 CASE_FLT_FN (BUILT_IN_ACOS):
6149 CASE_FLT_FN (BUILT_IN_ATAN):
6150 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6151 because of possible accuracy problems. */
6152 if (! flag_unsafe_math_optimizations)
6153 break;
6154 CASE_FLT_FN (BUILT_IN_SQRT):
6155 CASE_FLT_FN (BUILT_IN_FLOOR):
6156 CASE_FLT_FN (BUILT_IN_CEIL):
6157 CASE_FLT_FN (BUILT_IN_TRUNC):
6158 CASE_FLT_FN (BUILT_IN_ROUND):
6159 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6160 CASE_FLT_FN (BUILT_IN_RINT):
6161 target = expand_builtin_mathfn (exp, target, subtarget);
6162 if (target)
6163 return target;
6164 break;
6166 CASE_FLT_FN (BUILT_IN_ILOGB):
6167 if (! flag_unsafe_math_optimizations)
6168 break;
6169 CASE_FLT_FN (BUILT_IN_ISINF):
6170 CASE_FLT_FN (BUILT_IN_FINITE):
6171 case BUILT_IN_ISFINITE:
6172 case BUILT_IN_ISNORMAL:
6173 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6174 if (target)
6175 return target;
6176 break;
6178 CASE_FLT_FN (BUILT_IN_LCEIL):
6179 CASE_FLT_FN (BUILT_IN_LLCEIL):
6180 CASE_FLT_FN (BUILT_IN_LFLOOR):
6181 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6182 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6183 if (target)
6184 return target;
6185 break;
6187 CASE_FLT_FN (BUILT_IN_LRINT):
6188 CASE_FLT_FN (BUILT_IN_LLRINT):
6189 CASE_FLT_FN (BUILT_IN_LROUND):
6190 CASE_FLT_FN (BUILT_IN_LLROUND):
6191 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6192 if (target)
6193 return target;
6194 break;
6196 CASE_FLT_FN (BUILT_IN_POW):
6197 target = expand_builtin_pow (exp, target, subtarget);
6198 if (target)
6199 return target;
6200 break;
6202 CASE_FLT_FN (BUILT_IN_POWI):
6203 target = expand_builtin_powi (exp, target, subtarget);
6204 if (target)
6205 return target;
6206 break;
6208 CASE_FLT_FN (BUILT_IN_ATAN2):
6209 CASE_FLT_FN (BUILT_IN_LDEXP):
6210 CASE_FLT_FN (BUILT_IN_SCALB):
6211 CASE_FLT_FN (BUILT_IN_SCALBN):
6212 CASE_FLT_FN (BUILT_IN_SCALBLN):
6213 if (! flag_unsafe_math_optimizations)
6214 break;
6216 CASE_FLT_FN (BUILT_IN_FMOD):
6217 CASE_FLT_FN (BUILT_IN_REMAINDER):
6218 CASE_FLT_FN (BUILT_IN_DREM):
6219 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6220 if (target)
6221 return target;
6222 break;
6224 CASE_FLT_FN (BUILT_IN_CEXPI):
6225 target = expand_builtin_cexpi (exp, target, subtarget);
6226 gcc_assert (target);
6227 return target;
6229 CASE_FLT_FN (BUILT_IN_SIN):
6230 CASE_FLT_FN (BUILT_IN_COS):
6231 if (! flag_unsafe_math_optimizations)
6232 break;
6233 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6234 if (target)
6235 return target;
6236 break;
6238 CASE_FLT_FN (BUILT_IN_SINCOS):
6239 if (! flag_unsafe_math_optimizations)
6240 break;
6241 target = expand_builtin_sincos (exp);
6242 if (target)
6243 return target;
6244 break;
6246 case BUILT_IN_APPLY_ARGS:
6247 return expand_builtin_apply_args ();
6249 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6250 FUNCTION with a copy of the parameters described by
6251 ARGUMENTS, and ARGSIZE. It returns a block of memory
6252 allocated on the stack into which is stored all the registers
6253 that might possibly be used for returning the result of a
6254 function. ARGUMENTS is the value returned by
6255 __builtin_apply_args. ARGSIZE is the number of bytes of
6256 arguments that must be copied. ??? How should this value be
6257 computed? We'll also need a safe worst case value for varargs
6258 functions. */
6259 case BUILT_IN_APPLY:
6260 if (!validate_arglist (exp, POINTER_TYPE,
6261 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6262 && !validate_arglist (exp, REFERENCE_TYPE,
6263 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6264 return const0_rtx;
6265 else
6267 rtx ops[3];
6269 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6270 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6271 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6273 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6276 /* __builtin_return (RESULT) causes the function to return the
6277 value described by RESULT. RESULT is address of the block of
6278 memory returned by __builtin_apply. */
6279 case BUILT_IN_RETURN:
6280 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6281 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6282 return const0_rtx;
6284 case BUILT_IN_SAVEREGS:
6285 return expand_builtin_saveregs ();
6287 case BUILT_IN_ARGS_INFO:
6288 return expand_builtin_args_info (exp);
6290 case BUILT_IN_VA_ARG_PACK:
6291 /* All valid uses of __builtin_va_arg_pack () are removed during
6292 inlining. */
6293 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6294 return const0_rtx;
6296 case BUILT_IN_VA_ARG_PACK_LEN:
6297 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6298 inlining. */
6299 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6300 return const0_rtx;
6302 /* Return the address of the first anonymous stack arg. */
6303 case BUILT_IN_NEXT_ARG:
6304 if (fold_builtin_next_arg (exp, false))
6305 return const0_rtx;
6306 return expand_builtin_next_arg ();
6308 case BUILT_IN_CLEAR_CACHE:
6309 target = expand_builtin___clear_cache (exp);
6310 if (target)
6311 return target;
6312 break;
6314 case BUILT_IN_CLASSIFY_TYPE:
6315 return expand_builtin_classify_type (exp);
6317 case BUILT_IN_CONSTANT_P:
6318 return const0_rtx;
6320 case BUILT_IN_FRAME_ADDRESS:
6321 case BUILT_IN_RETURN_ADDRESS:
6322 return expand_builtin_frame_address (fndecl, exp);
6324 /* Returns the address of the area where the structure is returned.
6325 0 otherwise. */
6326 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6327 if (call_expr_nargs (exp) != 0
6328 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6329 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6330 return const0_rtx;
6331 else
6332 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6334 case BUILT_IN_ALLOCA:
6335 target = expand_builtin_alloca (exp, target);
6336 if (target)
6337 return target;
6338 break;
6340 case BUILT_IN_STACK_SAVE:
6341 return expand_stack_save ();
6343 case BUILT_IN_STACK_RESTORE:
6344 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6345 return const0_rtx;
6347 case BUILT_IN_BSWAP32:
6348 case BUILT_IN_BSWAP64:
6349 target = expand_builtin_bswap (exp, target, subtarget);
6351 if (target)
6352 return target;
6353 break;
6355 CASE_INT_FN (BUILT_IN_FFS):
6356 case BUILT_IN_FFSIMAX:
6357 target = expand_builtin_unop (target_mode, exp, target,
6358 subtarget, ffs_optab);
6359 if (target)
6360 return target;
6361 break;
6363 CASE_INT_FN (BUILT_IN_CLZ):
6364 case BUILT_IN_CLZIMAX:
6365 target = expand_builtin_unop (target_mode, exp, target,
6366 subtarget, clz_optab);
6367 if (target)
6368 return target;
6369 break;
6371 CASE_INT_FN (BUILT_IN_CTZ):
6372 case BUILT_IN_CTZIMAX:
6373 target = expand_builtin_unop (target_mode, exp, target,
6374 subtarget, ctz_optab);
6375 if (target)
6376 return target;
6377 break;
6379 CASE_INT_FN (BUILT_IN_POPCOUNT):
6380 case BUILT_IN_POPCOUNTIMAX:
6381 target = expand_builtin_unop (target_mode, exp, target,
6382 subtarget, popcount_optab);
6383 if (target)
6384 return target;
6385 break;
6387 CASE_INT_FN (BUILT_IN_PARITY):
6388 case BUILT_IN_PARITYIMAX:
6389 target = expand_builtin_unop (target_mode, exp, target,
6390 subtarget, parity_optab);
6391 if (target)
6392 return target;
6393 break;
6395 case BUILT_IN_STRLEN:
6396 target = expand_builtin_strlen (exp, target, target_mode);
6397 if (target)
6398 return target;
6399 break;
6401 case BUILT_IN_STRCPY:
6402 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6403 if (target)
6404 return target;
6405 break;
6407 case BUILT_IN_STRNCPY:
6408 target = expand_builtin_strncpy (exp, target, mode);
6409 if (target)
6410 return target;
6411 break;
6413 case BUILT_IN_STPCPY:
6414 target = expand_builtin_stpcpy (exp, target, mode);
6415 if (target)
6416 return target;
6417 break;
6419 case BUILT_IN_STRCAT:
6420 target = expand_builtin_strcat (fndecl, exp, target, mode);
6421 if (target)
6422 return target;
6423 break;
6425 case BUILT_IN_STRNCAT:
6426 target = expand_builtin_strncat (exp, target, mode);
6427 if (target)
6428 return target;
6429 break;
6431 case BUILT_IN_STRSPN:
6432 target = expand_builtin_strspn (exp, target, mode);
6433 if (target)
6434 return target;
6435 break;
6437 case BUILT_IN_STRCSPN:
6438 target = expand_builtin_strcspn (exp, target, mode);
6439 if (target)
6440 return target;
6441 break;
6443 case BUILT_IN_STRSTR:
6444 target = expand_builtin_strstr (exp, target, mode);
6445 if (target)
6446 return target;
6447 break;
6449 case BUILT_IN_STRPBRK:
6450 target = expand_builtin_strpbrk (exp, target, mode);
6451 if (target)
6452 return target;
6453 break;
6455 case BUILT_IN_INDEX:
6456 case BUILT_IN_STRCHR:
6457 target = expand_builtin_strchr (exp, target, mode);
6458 if (target)
6459 return target;
6460 break;
6462 case BUILT_IN_RINDEX:
6463 case BUILT_IN_STRRCHR:
6464 target = expand_builtin_strrchr (exp, target, mode);
6465 if (target)
6466 return target;
6467 break;
6469 case BUILT_IN_MEMCPY:
6470 target = expand_builtin_memcpy (exp, target, mode);
6471 if (target)
6472 return target;
6473 break;
6475 case BUILT_IN_MEMPCPY:
6476 target = expand_builtin_mempcpy (exp, target, mode);
6477 if (target)
6478 return target;
6479 break;
6481 case BUILT_IN_MEMMOVE:
6482 target = expand_builtin_memmove (exp, target, mode, ignore);
6483 if (target)
6484 return target;
6485 break;
6487 case BUILT_IN_BCOPY:
6488 target = expand_builtin_bcopy (exp, ignore);
6489 if (target)
6490 return target;
6491 break;
6493 case BUILT_IN_MEMSET:
6494 target = expand_builtin_memset (exp, target, mode);
6495 if (target)
6496 return target;
6497 break;
6499 case BUILT_IN_BZERO:
6500 target = expand_builtin_bzero (exp);
6501 if (target)
6502 return target;
6503 break;
6505 case BUILT_IN_STRCMP:
6506 target = expand_builtin_strcmp (exp, target, mode);
6507 if (target)
6508 return target;
6509 break;
6511 case BUILT_IN_STRNCMP:
6512 target = expand_builtin_strncmp (exp, target, mode);
6513 if (target)
6514 return target;
6515 break;
6517 case BUILT_IN_MEMCHR:
6518 target = expand_builtin_memchr (exp, target, mode);
6519 if (target)
6520 return target;
6521 break;
6523 case BUILT_IN_BCMP:
6524 case BUILT_IN_MEMCMP:
6525 target = expand_builtin_memcmp (exp, target, mode);
6526 if (target)
6527 return target;
6528 break;
6530 case BUILT_IN_SETJMP:
6531 /* This should have been lowered to the builtins below. */
6532 gcc_unreachable ();
6534 case BUILT_IN_SETJMP_SETUP:
6535 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6536 and the receiver label. */
6537 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6539 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6540 VOIDmode, EXPAND_NORMAL);
6541 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6542 rtx label_r = label_rtx (label);
6544 /* This is copied from the handling of non-local gotos. */
6545 expand_builtin_setjmp_setup (buf_addr, label_r);
6546 nonlocal_goto_handler_labels
6547 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6548 nonlocal_goto_handler_labels);
6549 /* ??? Do not let expand_label treat us as such since we would
6550 not want to be both on the list of non-local labels and on
6551 the list of forced labels. */
6552 FORCED_LABEL (label) = 0;
6553 return const0_rtx;
6555 break;
6557 case BUILT_IN_SETJMP_DISPATCHER:
6558 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6559 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6561 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6562 rtx label_r = label_rtx (label);
6564 /* Remove the dispatcher label from the list of non-local labels
6565 since the receiver labels have been added to it above. */
6566 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6567 return const0_rtx;
6569 break;
6571 case BUILT_IN_SETJMP_RECEIVER:
6572 /* __builtin_setjmp_receiver is passed the receiver label. */
6573 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6575 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6576 rtx label_r = label_rtx (label);
6578 expand_builtin_setjmp_receiver (label_r);
6579 return const0_rtx;
6581 break;
6583 /* __builtin_longjmp is passed a pointer to an array of five words.
6584 It's similar to the C library longjmp function but works with
6585 __builtin_setjmp above. */
6586 case BUILT_IN_LONGJMP:
6587 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6589 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6590 VOIDmode, EXPAND_NORMAL);
6591 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6593 if (value != const1_rtx)
6595 error ("%<__builtin_longjmp%> second argument must be 1");
6596 return const0_rtx;
6599 expand_builtin_longjmp (buf_addr, value);
6600 return const0_rtx;
6602 break;
6604 case BUILT_IN_NONLOCAL_GOTO:
6605 target = expand_builtin_nonlocal_goto (exp);
6606 if (target)
6607 return target;
6608 break;
6610 /* This updates the setjmp buffer that is its argument with the value
6611 of the current stack pointer. */
6612 case BUILT_IN_UPDATE_SETJMP_BUF:
6613 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6615 rtx buf_addr
6616 = expand_normal (CALL_EXPR_ARG (exp, 0));
6618 expand_builtin_update_setjmp_buf (buf_addr);
6619 return const0_rtx;
6621 break;
6623 case BUILT_IN_TRAP:
6624 expand_builtin_trap ();
6625 return const0_rtx;
6627 case BUILT_IN_PRINTF:
6628 target = expand_builtin_printf (exp, target, mode, false);
6629 if (target)
6630 return target;
6631 break;
6633 case BUILT_IN_PRINTF_UNLOCKED:
6634 target = expand_builtin_printf (exp, target, mode, true);
6635 if (target)
6636 return target;
6637 break;
6639 case BUILT_IN_FPUTS:
6640 target = expand_builtin_fputs (exp, target, false);
6641 if (target)
6642 return target;
6643 break;
6644 case BUILT_IN_FPUTS_UNLOCKED:
6645 target = expand_builtin_fputs (exp, target, true);
6646 if (target)
6647 return target;
6648 break;
6650 case BUILT_IN_FPRINTF:
6651 target = expand_builtin_fprintf (exp, target, mode, false);
6652 if (target)
6653 return target;
6654 break;
6656 case BUILT_IN_FPRINTF_UNLOCKED:
6657 target = expand_builtin_fprintf (exp, target, mode, true);
6658 if (target)
6659 return target;
6660 break;
6662 case BUILT_IN_SPRINTF:
6663 target = expand_builtin_sprintf (exp, target, mode);
6664 if (target)
6665 return target;
6666 break;
6668 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6669 case BUILT_IN_SIGNBITD32:
6670 case BUILT_IN_SIGNBITD64:
6671 case BUILT_IN_SIGNBITD128:
6672 target = expand_builtin_signbit (exp, target);
6673 if (target)
6674 return target;
6675 break;
6677 /* Various hooks for the DWARF 2 __throw routine. */
6678 case BUILT_IN_UNWIND_INIT:
6679 expand_builtin_unwind_init ();
6680 return const0_rtx;
6681 case BUILT_IN_DWARF_CFA:
6682 return virtual_cfa_rtx;
6683 #ifdef DWARF2_UNWIND_INFO
6684 case BUILT_IN_DWARF_SP_COLUMN:
6685 return expand_builtin_dwarf_sp_column ();
6686 case BUILT_IN_INIT_DWARF_REG_SIZES:
6687 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6688 return const0_rtx;
6689 #endif
6690 case BUILT_IN_FROB_RETURN_ADDR:
6691 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6692 case BUILT_IN_EXTRACT_RETURN_ADDR:
6693 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6694 case BUILT_IN_EH_RETURN:
6695 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6696 CALL_EXPR_ARG (exp, 1));
6697 return const0_rtx;
6698 #ifdef EH_RETURN_DATA_REGNO
6699 case BUILT_IN_EH_RETURN_DATA_REGNO:
6700 return expand_builtin_eh_return_data_regno (exp);
6701 #endif
6702 case BUILT_IN_EXTEND_POINTER:
6703 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6705 case BUILT_IN_VA_START:
6706 return expand_builtin_va_start (exp);
6707 case BUILT_IN_VA_END:
6708 return expand_builtin_va_end (exp);
6709 case BUILT_IN_VA_COPY:
6710 return expand_builtin_va_copy (exp);
6711 case BUILT_IN_EXPECT:
6712 return expand_builtin_expect (exp, target);
6713 case BUILT_IN_PREFETCH:
6714 expand_builtin_prefetch (exp);
6715 return const0_rtx;
6717 case BUILT_IN_PROFILE_FUNC_ENTER:
6718 return expand_builtin_profile_func (false);
6719 case BUILT_IN_PROFILE_FUNC_EXIT:
6720 return expand_builtin_profile_func (true);
6722 case BUILT_IN_INIT_TRAMPOLINE:
6723 return expand_builtin_init_trampoline (exp);
6724 case BUILT_IN_ADJUST_TRAMPOLINE:
6725 return expand_builtin_adjust_trampoline (exp);
6727 case BUILT_IN_FORK:
6728 case BUILT_IN_EXECL:
6729 case BUILT_IN_EXECV:
6730 case BUILT_IN_EXECLP:
6731 case BUILT_IN_EXECLE:
6732 case BUILT_IN_EXECVP:
6733 case BUILT_IN_EXECVE:
6734 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6735 if (target)
6736 return target;
6737 break;
6739 case BUILT_IN_FETCH_AND_ADD_1:
6740 case BUILT_IN_FETCH_AND_ADD_2:
6741 case BUILT_IN_FETCH_AND_ADD_4:
6742 case BUILT_IN_FETCH_AND_ADD_8:
6743 case BUILT_IN_FETCH_AND_ADD_16:
6744 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6745 target = expand_builtin_sync_operation (mode, exp, PLUS,
6746 false, target, ignore);
6747 if (target)
6748 return target;
6749 break;
6751 case BUILT_IN_FETCH_AND_SUB_1:
6752 case BUILT_IN_FETCH_AND_SUB_2:
6753 case BUILT_IN_FETCH_AND_SUB_4:
6754 case BUILT_IN_FETCH_AND_SUB_8:
6755 case BUILT_IN_FETCH_AND_SUB_16:
6756 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6757 target = expand_builtin_sync_operation (mode, exp, MINUS,
6758 false, target, ignore);
6759 if (target)
6760 return target;
6761 break;
6763 case BUILT_IN_FETCH_AND_OR_1:
6764 case BUILT_IN_FETCH_AND_OR_2:
6765 case BUILT_IN_FETCH_AND_OR_4:
6766 case BUILT_IN_FETCH_AND_OR_8:
6767 case BUILT_IN_FETCH_AND_OR_16:
6768 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6769 target = expand_builtin_sync_operation (mode, exp, IOR,
6770 false, target, ignore);
6771 if (target)
6772 return target;
6773 break;
6775 case BUILT_IN_FETCH_AND_AND_1:
6776 case BUILT_IN_FETCH_AND_AND_2:
6777 case BUILT_IN_FETCH_AND_AND_4:
6778 case BUILT_IN_FETCH_AND_AND_8:
6779 case BUILT_IN_FETCH_AND_AND_16:
6780 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6781 target = expand_builtin_sync_operation (mode, exp, AND,
6782 false, target, ignore);
6783 if (target)
6784 return target;
6785 break;
6787 case BUILT_IN_FETCH_AND_XOR_1:
6788 case BUILT_IN_FETCH_AND_XOR_2:
6789 case BUILT_IN_FETCH_AND_XOR_4:
6790 case BUILT_IN_FETCH_AND_XOR_8:
6791 case BUILT_IN_FETCH_AND_XOR_16:
6792 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6793 target = expand_builtin_sync_operation (mode, exp, XOR,
6794 false, target, ignore);
6795 if (target)
6796 return target;
6797 break;
6799 case BUILT_IN_FETCH_AND_NAND_1:
6800 case BUILT_IN_FETCH_AND_NAND_2:
6801 case BUILT_IN_FETCH_AND_NAND_4:
6802 case BUILT_IN_FETCH_AND_NAND_8:
6803 case BUILT_IN_FETCH_AND_NAND_16:
6804 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6805 target = expand_builtin_sync_operation (mode, exp, NOT,
6806 false, target, ignore);
6807 if (target)
6808 return target;
6809 break;
6811 case BUILT_IN_ADD_AND_FETCH_1:
6812 case BUILT_IN_ADD_AND_FETCH_2:
6813 case BUILT_IN_ADD_AND_FETCH_4:
6814 case BUILT_IN_ADD_AND_FETCH_8:
6815 case BUILT_IN_ADD_AND_FETCH_16:
6816 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6817 target = expand_builtin_sync_operation (mode, exp, PLUS,
6818 true, target, ignore);
6819 if (target)
6820 return target;
6821 break;
6823 case BUILT_IN_SUB_AND_FETCH_1:
6824 case BUILT_IN_SUB_AND_FETCH_2:
6825 case BUILT_IN_SUB_AND_FETCH_4:
6826 case BUILT_IN_SUB_AND_FETCH_8:
6827 case BUILT_IN_SUB_AND_FETCH_16:
6828 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6829 target = expand_builtin_sync_operation (mode, exp, MINUS,
6830 true, target, ignore);
6831 if (target)
6832 return target;
6833 break;
6835 case BUILT_IN_OR_AND_FETCH_1:
6836 case BUILT_IN_OR_AND_FETCH_2:
6837 case BUILT_IN_OR_AND_FETCH_4:
6838 case BUILT_IN_OR_AND_FETCH_8:
6839 case BUILT_IN_OR_AND_FETCH_16:
6840 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6841 target = expand_builtin_sync_operation (mode, exp, IOR,
6842 true, target, ignore);
6843 if (target)
6844 return target;
6845 break;
6847 case BUILT_IN_AND_AND_FETCH_1:
6848 case BUILT_IN_AND_AND_FETCH_2:
6849 case BUILT_IN_AND_AND_FETCH_4:
6850 case BUILT_IN_AND_AND_FETCH_8:
6851 case BUILT_IN_AND_AND_FETCH_16:
6852 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6853 target = expand_builtin_sync_operation (mode, exp, AND,
6854 true, target, ignore);
6855 if (target)
6856 return target;
6857 break;
6859 case BUILT_IN_XOR_AND_FETCH_1:
6860 case BUILT_IN_XOR_AND_FETCH_2:
6861 case BUILT_IN_XOR_AND_FETCH_4:
6862 case BUILT_IN_XOR_AND_FETCH_8:
6863 case BUILT_IN_XOR_AND_FETCH_16:
6864 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6865 target = expand_builtin_sync_operation (mode, exp, XOR,
6866 true, target, ignore);
6867 if (target)
6868 return target;
6869 break;
6871 case BUILT_IN_NAND_AND_FETCH_1:
6872 case BUILT_IN_NAND_AND_FETCH_2:
6873 case BUILT_IN_NAND_AND_FETCH_4:
6874 case BUILT_IN_NAND_AND_FETCH_8:
6875 case BUILT_IN_NAND_AND_FETCH_16:
6876 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6877 target = expand_builtin_sync_operation (mode, exp, NOT,
6878 true, target, ignore);
6879 if (target)
6880 return target;
6881 break;
6883 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6884 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6885 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6886 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6887 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6888 if (mode == VOIDmode)
6889 mode = TYPE_MODE (boolean_type_node);
6890 if (!target || !register_operand (target, mode))
6891 target = gen_reg_rtx (mode);
6893 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6894 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6895 if (target)
6896 return target;
6897 break;
6899 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6900 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6901 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6902 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6903 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6904 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6905 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6906 if (target)
6907 return target;
6908 break;
6910 case BUILT_IN_LOCK_TEST_AND_SET_1:
6911 case BUILT_IN_LOCK_TEST_AND_SET_2:
6912 case BUILT_IN_LOCK_TEST_AND_SET_4:
6913 case BUILT_IN_LOCK_TEST_AND_SET_8:
6914 case BUILT_IN_LOCK_TEST_AND_SET_16:
6915 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6916 target = expand_builtin_lock_test_and_set (mode, exp, target);
6917 if (target)
6918 return target;
6919 break;
6921 case BUILT_IN_LOCK_RELEASE_1:
6922 case BUILT_IN_LOCK_RELEASE_2:
6923 case BUILT_IN_LOCK_RELEASE_4:
6924 case BUILT_IN_LOCK_RELEASE_8:
6925 case BUILT_IN_LOCK_RELEASE_16:
6926 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6927 expand_builtin_lock_release (mode, exp);
6928 return const0_rtx;
6930 case BUILT_IN_SYNCHRONIZE:
6931 expand_builtin_synchronize ();
6932 return const0_rtx;
6934 case BUILT_IN_OBJECT_SIZE:
6935 return expand_builtin_object_size (exp);
6937 case BUILT_IN_MEMCPY_CHK:
6938 case BUILT_IN_MEMPCPY_CHK:
6939 case BUILT_IN_MEMMOVE_CHK:
6940 case BUILT_IN_MEMSET_CHK:
6941 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6942 if (target)
6943 return target;
6944 break;
6946 case BUILT_IN_STRCPY_CHK:
6947 case BUILT_IN_STPCPY_CHK:
6948 case BUILT_IN_STRNCPY_CHK:
6949 case BUILT_IN_STRCAT_CHK:
6950 case BUILT_IN_STRNCAT_CHK:
6951 case BUILT_IN_SNPRINTF_CHK:
6952 case BUILT_IN_VSNPRINTF_CHK:
6953 maybe_emit_chk_warning (exp, fcode);
6954 break;
6956 case BUILT_IN_SPRINTF_CHK:
6957 case BUILT_IN_VSPRINTF_CHK:
6958 maybe_emit_sprintf_chk_warning (exp, fcode);
6959 break;
6961 default: /* just do library call, if unknown builtin */
6962 break;
6965 /* The switch statement above can drop through to cause the function
6966 to be called normally. */
6967 return expand_call (exp, target, ignore);
6970 /* Determine whether a tree node represents a call to a built-in
6971 function. If the tree T is a call to a built-in function with
6972 the right number of arguments of the appropriate types, return
6973 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6974 Otherwise the return value is END_BUILTINS. */
6976 enum built_in_function
6977 builtin_mathfn_code (const_tree t)
6979 const_tree fndecl, arg, parmlist;
6980 const_tree argtype, parmtype;
6981 const_call_expr_arg_iterator iter;
6983 if (TREE_CODE (t) != CALL_EXPR
6984 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6985 return END_BUILTINS;
6987 fndecl = get_callee_fndecl (t);
6988 if (fndecl == NULL_TREE
6989 || TREE_CODE (fndecl) != FUNCTION_DECL
6990 || ! DECL_BUILT_IN (fndecl)
6991 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6992 return END_BUILTINS;
6994 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6995 init_const_call_expr_arg_iterator (t, &iter);
6996 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6998 /* If a function doesn't take a variable number of arguments,
6999 the last element in the list will have type `void'. */
7000 parmtype = TREE_VALUE (parmlist);
7001 if (VOID_TYPE_P (parmtype))
7003 if (more_const_call_expr_args_p (&iter))
7004 return END_BUILTINS;
7005 return DECL_FUNCTION_CODE (fndecl);
7008 if (! more_const_call_expr_args_p (&iter))
7009 return END_BUILTINS;
7011 arg = next_const_call_expr_arg (&iter);
7012 argtype = TREE_TYPE (arg);
7014 if (SCALAR_FLOAT_TYPE_P (parmtype))
7016 if (! SCALAR_FLOAT_TYPE_P (argtype))
7017 return END_BUILTINS;
7019 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7021 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7022 return END_BUILTINS;
7024 else if (POINTER_TYPE_P (parmtype))
7026 if (! POINTER_TYPE_P (argtype))
7027 return END_BUILTINS;
7029 else if (INTEGRAL_TYPE_P (parmtype))
7031 if (! INTEGRAL_TYPE_P (argtype))
7032 return END_BUILTINS;
7034 else
7035 return END_BUILTINS;
7038 /* Variable-length argument list. */
7039 return DECL_FUNCTION_CODE (fndecl);
7042 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7043 evaluate to a constant. */
7045 static tree
7046 fold_builtin_constant_p (tree arg)
7048 /* We return 1 for a numeric type that's known to be a constant
7049 value at compile-time or for an aggregate type that's a
7050 literal constant. */
7051 STRIP_NOPS (arg);
7053 /* If we know this is a constant, return the constant one. */
7054 if (CONSTANT_CLASS_P (arg)
7055 || (TREE_CODE (arg) == CONSTRUCTOR
7056 && TREE_CONSTANT (arg)))
7057 return integer_one_node;
7058 if (TREE_CODE (arg) == ADDR_EXPR)
7060 tree op = TREE_OPERAND (arg, 0);
7061 if (TREE_CODE (op) == STRING_CST
7062 || (TREE_CODE (op) == ARRAY_REF
7063 && integer_zerop (TREE_OPERAND (op, 1))
7064 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7065 return integer_one_node;
7068 /* If this expression has side effects, show we don't know it to be a
7069 constant. Likewise if it's a pointer or aggregate type since in
7070 those cases we only want literals, which are only optimized
7071 when generating RTL, not later.
7072 And finally, if we are compiling an initializer, not code, we
7073 need to return a definite result now; there's not going to be any
7074 more optimization done. */
7075 if (TREE_SIDE_EFFECTS (arg)
7076 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7077 || POINTER_TYPE_P (TREE_TYPE (arg))
7078 || cfun == 0
7079 || folding_initializer)
7080 return integer_zero_node;
7082 return NULL_TREE;
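/* Illustrative sketch, not part of the compiler source (kept under #if 0):
   what the folder above decides at the tree level.  */
#if 0
int
constant_p_example (int x)
{
  int a = __builtin_constant_p (42);		/* folds to 1 immediately     */
  int b = __builtin_constant_p ("literal");	/* address of a STRING_CST: 1 */
  int c = __builtin_constant_p (x);		/* not folded here; decided
						   later, at RTL generation   */
  return a + b + c;
}
#endif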
7085 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7086 return it as a truthvalue. */
7088 static tree
7089 build_builtin_expect_predicate (tree pred, tree expected)
7091 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7093 fn = built_in_decls[BUILT_IN_EXPECT];
7094 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7095 ret_type = TREE_TYPE (TREE_TYPE (fn));
7096 pred_type = TREE_VALUE (arg_types);
7097 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7099 pred = fold_convert (pred_type, pred);
7100 expected = fold_convert (expected_type, expected);
7101 call_expr = build_call_expr (fn, 2, pred, expected);
7103 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7104 build_int_cst (ret_type, 0));
7107 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7108 NULL_TREE if no simplification is possible. */
7110 static tree
7111 fold_builtin_expect (tree arg0, tree arg1)
7113 tree inner, fndecl;
7114 enum tree_code code;
7116 /* If this is a builtin_expect within a builtin_expect, keep the
7117 inner one. See through a comparison against a constant. It
7118 might have been added to create a truthvalue. */
7119 inner = arg0;
7120 if (COMPARISON_CLASS_P (inner)
7121 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7122 inner = TREE_OPERAND (inner, 0);
7124 if (TREE_CODE (inner) == CALL_EXPR
7125 && (fndecl = get_callee_fndecl (inner))
7126 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7127 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7128 return arg0;
7130 /* Distribute the expected value over short-circuiting operators.
7131 See through the cast from truthvalue_type_node to long. */
7132 inner = arg0;
7133 while (TREE_CODE (inner) == NOP_EXPR
7134 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7135 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7136 inner = TREE_OPERAND (inner, 0);
7138 code = TREE_CODE (inner);
7139 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7141 tree op0 = TREE_OPERAND (inner, 0);
7142 tree op1 = TREE_OPERAND (inner, 1);
7144 op0 = build_builtin_expect_predicate (op0, arg1);
7145 op1 = build_builtin_expect_predicate (op1, arg1);
7146 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7148 return fold_convert (TREE_TYPE (arg0), inner);
7151 /* If the argument isn't invariant then there's nothing else we can do. */
7152 if (!TREE_CONSTANT (arg0))
7153 return NULL_TREE;
7155 /* If we expect that a comparison against the argument will fold to
7156 a constant return the constant. In practice, this means a true
7157 constant or the address of a non-weak symbol. */
7158 inner = arg0;
7159 STRIP_NOPS (inner);
7160 if (TREE_CODE (inner) == ADDR_EXPR)
7162 do
7164 inner = TREE_OPERAND (inner, 0);
7166 while (TREE_CODE (inner) == COMPONENT_REF
7167 || TREE_CODE (inner) == ARRAY_REF);
7168 if (DECL_P (inner) && DECL_WEAK (inner))
7169 return NULL_TREE;
7172 /* Otherwise, ARG0 already has the proper type for the return value. */
7173 return arg0;
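/* Illustration: the distribution above rewrites, roughly,
     __builtin_expect (a && b, 1)
   into
     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)
   (converted back to the type of the original argument), so the hint is
   attached to each arm of the short-circuit.  */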
7176 /* Fold a call to __builtin_classify_type with argument ARG. */
7178 static tree
7179 fold_builtin_classify_type (tree arg)
7181 if (arg == 0)
7182 return build_int_cst (NULL_TREE, no_type_class);
7184 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7187 /* Fold a call to __builtin_strlen with argument ARG. */
7189 static tree
7190 fold_builtin_strlen (tree arg)
7192 if (!validate_arg (arg, POINTER_TYPE))
7193 return NULL_TREE;
7194 else
7196 tree len = c_strlen (arg, 0);
7198 if (len)
7200 /* Convert from the internal "sizetype" type to "size_t". */
7201 if (size_type_node)
7202 len = fold_convert (size_type_node, len);
7203 return len;
7206 return NULL_TREE;
7210 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7212 static tree
7213 fold_builtin_inf (tree type, int warn)
7215 REAL_VALUE_TYPE real;
7217 /* __builtin_inff is intended to be usable to define INFINITY on all
7218 targets. If an infinity is not available, INFINITY expands "to a
7219 positive constant of type float that overflows at translation
7220 time", footnote "In this case, using INFINITY will violate the
7221 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7222 Thus we pedwarn to ensure this constraint violation is
7223 diagnosed. */
7224 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7225 pedwarn ("target format does not support infinity");
7227 real_inf (&real);
7228 return build_real (type, real);
7231 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7233 static tree
7234 fold_builtin_nan (tree arg, tree type, int quiet)
7236 REAL_VALUE_TYPE real;
7237 const char *str;
7239 if (!validate_arg (arg, POINTER_TYPE))
7240 return NULL_TREE;
7241 str = c_getstr (arg);
7242 if (!str)
7243 return NULL_TREE;
7245 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7246 return NULL_TREE;
7248 return build_real (type, real);
7251 /* Return true if the floating point expression T has an integer value.
7252 We also allow +Inf, -Inf and NaN to be considered integer values. */
7254 static bool
7255 integer_valued_real_p (tree t)
7257 switch (TREE_CODE (t))
7259 case FLOAT_EXPR:
7260 return true;
7262 case ABS_EXPR:
7263 case SAVE_EXPR:
7264 return integer_valued_real_p (TREE_OPERAND (t, 0));
7266 case COMPOUND_EXPR:
7267 case MODIFY_EXPR:
7268 case BIND_EXPR:
7269 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7271 case PLUS_EXPR:
7272 case MINUS_EXPR:
7273 case MULT_EXPR:
7274 case MIN_EXPR:
7275 case MAX_EXPR:
7276 return integer_valued_real_p (TREE_OPERAND (t, 0))
7277 && integer_valued_real_p (TREE_OPERAND (t, 1));
7279 case COND_EXPR:
7280 return integer_valued_real_p (TREE_OPERAND (t, 1))
7281 && integer_valued_real_p (TREE_OPERAND (t, 2));
7283 case REAL_CST:
7284 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7286 case NOP_EXPR:
7288 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7289 if (TREE_CODE (type) == INTEGER_TYPE)
7290 return true;
7291 if (TREE_CODE (type) == REAL_TYPE)
7292 return integer_valued_real_p (TREE_OPERAND (t, 0));
7293 break;
7296 case CALL_EXPR:
7297 switch (builtin_mathfn_code (t))
7299 CASE_FLT_FN (BUILT_IN_CEIL):
7300 CASE_FLT_FN (BUILT_IN_FLOOR):
7301 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7302 CASE_FLT_FN (BUILT_IN_RINT):
7303 CASE_FLT_FN (BUILT_IN_ROUND):
7304 CASE_FLT_FN (BUILT_IN_TRUNC):
7305 return true;
7307 CASE_FLT_FN (BUILT_IN_FMIN):
7308 CASE_FLT_FN (BUILT_IN_FMAX):
7309 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7310 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7312 default:
7313 break;
7315 break;
7317 default:
7318 break;
7320 return false;
7323 /* FNDECL is assumed to be a builtin where truncation can be propagated
7324 across (for instance floor((double)f) == (double)floorf (f)).
7325 Do the transformation for a call with argument ARG. */
7327 static tree
7328 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7330 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7332 if (!validate_arg (arg, REAL_TYPE))
7333 return NULL_TREE;
7335 /* Integer rounding functions are idempotent. */
7336 if (fcode == builtin_mathfn_code (arg))
7337 return arg;
7339 /* If argument is already integer valued, and we don't need to worry
7340 about setting errno, there's no need to perform rounding. */
7341 if (! flag_errno_math && integer_valued_real_p (arg))
7342 return arg;
7344 if (optimize)
7346 tree arg0 = strip_float_extensions (arg);
7347 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7348 tree newtype = TREE_TYPE (arg0);
7349 tree decl;
7351 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7352 && (decl = mathfn_built_in (newtype, fcode)))
7353 return fold_convert (ftype,
7354 build_call_expr (decl, 1,
7355 fold_convert (newtype, arg0)));
7357 return NULL_TREE;
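/* Illustration: for a float F, a call spelled floor ((double) f) is
   narrowed above (when optimizing) to (double) floorf (f), provided a
   floorf declaration is available; the same applies to the other
   truncation-transparent functions routed through this helper.  */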
7360 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7361 the argument, for instance lround((double)f) -> lroundf (f).
7362 Do the transformation for a call with argument ARG. */
7364 static tree
7365 fold_fixed_mathfn (tree fndecl, tree arg)
7367 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7369 if (!validate_arg (arg, REAL_TYPE))
7370 return NULL_TREE;
7372 /* If argument is already integer valued, and we don't need to worry
7373 about setting errno, there's no need to perform rounding. */
7374 if (! flag_errno_math && integer_valued_real_p (arg))
7375 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7377 if (optimize)
7379 tree ftype = TREE_TYPE (arg);
7380 tree arg0 = strip_float_extensions (arg);
7381 tree newtype = TREE_TYPE (arg0);
7382 tree decl;
7384 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7385 && (decl = mathfn_built_in (newtype, fcode)))
7386 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7389 /* Canonicalize llround (x) to lround (x) (and likewise llceil, llfloor
7390 and llrint) on targets where long long has the same precision as long. */
7391 if (TYPE_PRECISION (long_long_integer_type_node)
7392 == TYPE_PRECISION (long_integer_type_node))
7394 tree newfn = NULL_TREE;
7395 switch (fcode)
7397 CASE_FLT_FN (BUILT_IN_LLCEIL):
7398 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7399 break;
7401 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7402 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7403 break;
7405 CASE_FLT_FN (BUILT_IN_LLROUND):
7406 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7407 break;
7409 CASE_FLT_FN (BUILT_IN_LLRINT):
7410 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7411 break;
7413 default:
7414 break;
7417 if (newfn)
7419 tree newcall = build_call_expr (newfn, 1, arg);
7420 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7424 return NULL_TREE;
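/* Illustration: when optimizing, lround ((double) f) with a float F
   becomes lroundf (f); and where long long and long have the same
   precision, llround (x) is rewritten as lround (x) with the result
   converted back to the original return type.  */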
7427 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7428 return type. Return NULL_TREE if no simplification can be made. */
7430 static tree
7431 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7433 tree res;
7435 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7436 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7437 return NULL_TREE;
7439 /* Calculate the result when the argument is a constant. */
7440 if (TREE_CODE (arg) == COMPLEX_CST
7441 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7442 type, mpfr_hypot)))
7443 return res;
7445 if (TREE_CODE (arg) == COMPLEX_EXPR)
7447 tree real = TREE_OPERAND (arg, 0);
7448 tree imag = TREE_OPERAND (arg, 1);
7450 /* If either part is zero, cabs is fabs of the other. */
7451 if (real_zerop (real))
7452 return fold_build1 (ABS_EXPR, type, imag);
7453 if (real_zerop (imag))
7454 return fold_build1 (ABS_EXPR, type, real);
7456 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7457 if (flag_unsafe_math_optimizations
7458 && operand_equal_p (real, imag, OEP_PURE_SAME))
7460 const REAL_VALUE_TYPE sqrt2_trunc
7461 = real_value_truncate (TYPE_MODE (type),
7462 *get_real_const (rv_sqrt2));
7463 STRIP_NOPS (real);
7464 return fold_build2 (MULT_EXPR, type,
7465 fold_build1 (ABS_EXPR, type, real),
7466 build_real (type, sqrt2_trunc));
7470 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7471 if (TREE_CODE (arg) == NEGATE_EXPR
7472 || TREE_CODE (arg) == CONJ_EXPR)
7473 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7475 /* Don't do this when optimizing for size. */
7476 if (flag_unsafe_math_optimizations
7477 && optimize && !optimize_size)
7479 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7481 if (sqrtfn != NULL_TREE)
7483 tree rpart, ipart, result;
7485 arg = builtin_save_expr (arg);
7487 rpart = fold_build1 (REALPART_EXPR, type, arg);
7488 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7490 rpart = builtin_save_expr (rpart);
7491 ipart = builtin_save_expr (ipart);
7493 result = fold_build2 (PLUS_EXPR, type,
7494 fold_build2 (MULT_EXPR, type,
7495 rpart, rpart),
7496 fold_build2 (MULT_EXPR, type,
7497 ipart, ipart));
7499 return build_call_expr (sqrtfn, 1, result);
7503 return NULL_TREE;
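/* Illustration of the cabs folds above:
     cabs (x + 0i)               -> fabs (x)
     cabs (x + xi)               -> fabs (x) * sqrt (2)   (unsafe math only)
     cabs (-z), cabs (conj (z))  -> cabs (z)
   and, under unsafe math when not optimizing for size, the remaining
   cases become sqrt (creal (z)*creal (z) + cimag (z)*cimag (z)).  */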
7506 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7507 Return NULL_TREE if no simplification can be made. */
7509 static tree
7510 fold_builtin_sqrt (tree arg, tree type)
7513 enum built_in_function fcode;
7514 tree res;
7516 if (!validate_arg (arg, REAL_TYPE))
7517 return NULL_TREE;
7519 /* Calculate the result when the argument is a constant. */
7520 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7521 return res;
7523 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7524 fcode = builtin_mathfn_code (arg);
7525 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7527 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7528 arg = fold_build2 (MULT_EXPR, type,
7529 CALL_EXPR_ARG (arg, 0),
7530 build_real (type, dconsthalf));
7531 return build_call_expr (expfn, 1, arg);
7534 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7535 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7537 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7539 if (powfn)
7541 tree arg0 = CALL_EXPR_ARG (arg, 0);
7542 tree tree_root;
7543 /* The inner root was either sqrt or cbrt. */
7544 REAL_VALUE_TYPE dconstroot =
7545 BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
7547 /* Adjust for the outer root. */
7548 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7549 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7550 tree_root = build_real (type, dconstroot);
7551 return build_call_expr (powfn, 2, arg0, tree_root);
7555 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7556 if (flag_unsafe_math_optimizations
7557 && (fcode == BUILT_IN_POW
7558 || fcode == BUILT_IN_POWF
7559 || fcode == BUILT_IN_POWL))
7561 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7562 tree arg0 = CALL_EXPR_ARG (arg, 0);
7563 tree arg1 = CALL_EXPR_ARG (arg, 1);
7564 tree narg1;
7565 if (!tree_expr_nonnegative_p (arg0))
7566 arg0 = build1 (ABS_EXPR, type, arg0);
7567 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7568 build_real (type, dconsthalf));
7569 return build_call_expr (powfn, 2, arg0, narg1);
7572 return NULL_TREE;
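/* Illustration of the sqrt folds above (all need unsafe math; constant
   arguments are instead evaluated directly via MPFR):
     sqrt (exp (x))     -> exp (x * 0.5)
     sqrt (sqrt (x))    -> pow (x, 0.25)
     sqrt (cbrt (x))    -> pow (x, 1.0/6.0)
     sqrt (pow (x, y))  -> pow (fabs (x), y * 0.5)  */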
7575 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7576 Return NULL_TREE if no simplification can be made. */
7578 static tree
7579 fold_builtin_cbrt (tree arg, tree type)
7581 const enum built_in_function fcode = builtin_mathfn_code (arg);
7582 tree res;
7584 if (!validate_arg (arg, REAL_TYPE))
7585 return NULL_TREE;
7587 /* Calculate the result when the argument is a constant. */
7588 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7589 return res;
7591 if (flag_unsafe_math_optimizations)
7593 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7594 if (BUILTIN_EXPONENT_P (fcode))
7596 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7597 const REAL_VALUE_TYPE third_trunc =
7598 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
7599 arg = fold_build2 (MULT_EXPR, type,
7600 CALL_EXPR_ARG (arg, 0),
7601 build_real (type, third_trunc));
7602 return build_call_expr (expfn, 1, arg);
7605 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7606 if (BUILTIN_SQRT_P (fcode))
7608 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7610 if (powfn)
7612 tree arg0 = CALL_EXPR_ARG (arg, 0);
7613 tree tree_root;
7614 REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);
7616 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7617 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7618 tree_root = build_real (type, dconstroot);
7619 return build_call_expr (powfn, 2, arg0, tree_root);
7623 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7624 if (BUILTIN_CBRT_P (fcode))
7626 tree arg0 = CALL_EXPR_ARG (arg, 0);
7627 if (tree_expr_nonnegative_p (arg0))
7629 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7631 if (powfn)
7633 tree tree_root;
7634 REAL_VALUE_TYPE dconstroot;
7636 real_arithmetic (&dconstroot, MULT_EXPR,
7637 get_real_const (rv_third),
7638 get_real_const (rv_third));
7639 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7640 tree_root = build_real (type, dconstroot);
7641 return build_call_expr (powfn, 2, arg0, tree_root);
7646 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7647 if (fcode == BUILT_IN_POW
7648 || fcode == BUILT_IN_POWF
7649 || fcode == BUILT_IN_POWL)
7651 tree arg00 = CALL_EXPR_ARG (arg, 0);
7652 tree arg01 = CALL_EXPR_ARG (arg, 1);
7653 if (tree_expr_nonnegative_p (arg00))
7655 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7656 const REAL_VALUE_TYPE dconstroot
7657 = real_value_truncate (TYPE_MODE (type),
7658 *get_real_const (rv_third));
7659 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7660 build_real (type, dconstroot));
7661 return build_call_expr (powfn, 2, arg00, narg01);
7665 return NULL_TREE;
7668 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7669 TYPE is the type of the return value. Return NULL_TREE if no
7670 simplification can be made. */
7672 static tree
7673 fold_builtin_cos (tree arg, tree type, tree fndecl)
7675 tree res, narg;
7677 if (!validate_arg (arg, REAL_TYPE))
7678 return NULL_TREE;
7680 /* Calculate the result when the argument is a constant. */
7681 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7682 return res;
7684 /* Optimize cos(-x) into cos (x). */
7685 if ((narg = fold_strip_sign_ops (arg)))
7686 return build_call_expr (fndecl, 1, narg);
7688 return NULL_TREE;
7691 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7692 Return NULL_TREE if no simplification can be made. */
7694 static tree
7695 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7697 if (validate_arg (arg, REAL_TYPE))
7699 tree res, narg;
7701 /* Calculate the result when the argument is a constant. */
7702 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7703 return res;
7705 /* Optimize cosh(-x) into cosh (x). */
7706 if ((narg = fold_strip_sign_ops (arg)))
7707 return build_call_expr (fndecl, 1, narg);
7710 return NULL_TREE;
7713 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7714 Return NULL_TREE if no simplification can be made. */
7716 static tree
7717 fold_builtin_tan (tree arg, tree type)
7719 enum built_in_function fcode;
7720 tree res;
7722 if (!validate_arg (arg, REAL_TYPE))
7723 return NULL_TREE;
7725 /* Calculate the result when the argument is a constant. */
7726 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7727 return res;
7729 /* Optimize tan(atan(x)) = x. */
7730 fcode = builtin_mathfn_code (arg);
7731 if (flag_unsafe_math_optimizations
7732 && (fcode == BUILT_IN_ATAN
7733 || fcode == BUILT_IN_ATANF
7734 || fcode == BUILT_IN_ATANL))
7735 return CALL_EXPR_ARG (arg, 0);
7737 return NULL_TREE;
7740 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7741 NULL_TREE if no simplification can be made. */
7743 static tree
7744 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7746 tree type;
7747 tree res, fn, call;
7749 if (!validate_arg (arg0, REAL_TYPE)
7750 || !validate_arg (arg1, POINTER_TYPE)
7751 || !validate_arg (arg2, POINTER_TYPE))
7752 return NULL_TREE;
7754 type = TREE_TYPE (arg0);
7756 /* Calculate the result when the argument is a constant. */
7757 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7758 return res;
7760 /* Canonicalize sincos to cexpi. */
7761 if (!TARGET_C99_FUNCTIONS)
7762 return NULL_TREE;
7763 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7764 if (!fn)
7765 return NULL_TREE;
7767 call = build_call_expr (fn, 1, arg0);
7768 call = builtin_save_expr (call);
7770 return build2 (COMPOUND_EXPR, type,
7771 build2 (MODIFY_EXPR, void_type_node,
7772 build_fold_indirect_ref (arg1),
7773 build1 (IMAGPART_EXPR, type, call)),
7774 build2 (MODIFY_EXPR, void_type_node,
7775 build_fold_indirect_ref (arg2),
7776 build1 (REALPART_EXPR, type, call)));
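/* Illustration: when the C99 runtime is available, sincos (x, sp, cp) is
   canonicalized above into roughly
     tmp = cexpi (x);  *sp = cimag (tmp);  *cp = creal (tmp);
   expressed as a COMPOUND_EXPR of the two assignments.  */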
7779 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7780 NULL_TREE if no simplification can be made. */
7782 static tree
7783 fold_builtin_cexp (tree arg0, tree type)
7785 tree rtype;
7786 tree realp, imagp, ifn;
7788 if (!validate_arg (arg0, COMPLEX_TYPE))
7789 return NULL_TREE;
7791 rtype = TREE_TYPE (TREE_TYPE (arg0));
7793 /* If we can figure out the real part of arg0 and it is constant zero,
7794 fold to cexpi. */
7795 if (!TARGET_C99_FUNCTIONS)
7796 return NULL_TREE;
7797 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7798 if (!ifn)
7799 return NULL_TREE;
7801 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7802 && real_zerop (realp))
7804 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7805 return build_call_expr (ifn, 1, narg);
7808 /* If we can easily decompose the real and imaginary parts, split cexp
7809 into exp (r) * cexpi (i). */
7810 if (flag_unsafe_math_optimizations
7811 && realp)
7813 tree rfn, rcall, icall;
7815 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7816 if (!rfn)
7817 return NULL_TREE;
7819 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7820 if (!imagp)
7821 return NULL_TREE;
7823 icall = build_call_expr (ifn, 1, imagp);
7824 icall = builtin_save_expr (icall);
7825 rcall = build_call_expr (rfn, 1, realp);
7826 rcall = builtin_save_expr (rcall);
7827 return fold_build2 (COMPLEX_EXPR, type,
7828 fold_build2 (MULT_EXPR, rtype,
7829 rcall,
7830 fold_build1 (REALPART_EXPR, rtype, icall)),
7831 fold_build2 (MULT_EXPR, rtype,
7832 rcall,
7833 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7836 return NULL_TREE;
7839 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7840 Return NULL_TREE if no simplification can be made. */
7842 static tree
7843 fold_builtin_trunc (tree fndecl, tree arg)
7845 if (!validate_arg (arg, REAL_TYPE))
7846 return NULL_TREE;
7848 /* Optimize trunc of constant value. */
7849 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7851 REAL_VALUE_TYPE r, x;
7852 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7854 x = TREE_REAL_CST (arg);
7855 real_trunc (&r, TYPE_MODE (type), &x);
7856 return build_real (type, r);
7859 return fold_trunc_transparent_mathfn (fndecl, arg);
7862 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7863 Return NULL_TREE if no simplification can be made. */
7865 static tree
7866 fold_builtin_floor (tree fndecl, tree arg)
7868 if (!validate_arg (arg, REAL_TYPE))
7869 return NULL_TREE;
7871 /* Optimize floor of constant value. */
7872 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7874 REAL_VALUE_TYPE x;
7876 x = TREE_REAL_CST (arg);
7877 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7879 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7880 REAL_VALUE_TYPE r;
7882 real_floor (&r, TYPE_MODE (type), &x);
7883 return build_real (type, r);
7887 /* Fold floor (x) where x is nonnegative to trunc (x). */
7888 if (tree_expr_nonnegative_p (arg))
7890 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7891 if (truncfn)
7892 return build_call_expr (truncfn, 1, arg);
7895 return fold_trunc_transparent_mathfn (fndecl, arg);
7898 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7899 Return NULL_TREE if no simplification can be made. */
7901 static tree
7902 fold_builtin_ceil (tree fndecl, tree arg)
7904 if (!validate_arg (arg, REAL_TYPE))
7905 return NULL_TREE;
7907 /* Optimize ceil of constant value. */
7908 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7910 REAL_VALUE_TYPE x;
7912 x = TREE_REAL_CST (arg);
7913 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7915 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7916 REAL_VALUE_TYPE r;
7918 real_ceil (&r, TYPE_MODE (type), &x);
7919 return build_real (type, r);
7923 return fold_trunc_transparent_mathfn (fndecl, arg);
7926 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7927 Return NULL_TREE if no simplification can be made. */
7929 static tree
7930 fold_builtin_round (tree fndecl, tree arg)
7932 if (!validate_arg (arg, REAL_TYPE))
7933 return NULL_TREE;
7935 /* Optimize round of constant value. */
7936 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7938 REAL_VALUE_TYPE x;
7940 x = TREE_REAL_CST (arg);
7941 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7943 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7944 REAL_VALUE_TYPE r;
7946 real_round (&r, TYPE_MODE (type), &x);
7947 return build_real (type, r);
7951 return fold_trunc_transparent_mathfn (fndecl, arg);
7954 /* Fold function call to builtin lround, lroundf or lroundl (or the
7955 corresponding long long versions) and other rounding functions. ARG
7956 is the argument to the call. Return NULL_TREE if no simplification
7957 can be made. */
7959 static tree
7960 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7962 if (!validate_arg (arg, REAL_TYPE))
7963 return NULL_TREE;
7965 /* Optimize lround of constant value. */
7966 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7968 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7970 if (real_isfinite (&x))
7972 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7973 tree ftype = TREE_TYPE (arg);
7974 unsigned HOST_WIDE_INT lo2;
7975 HOST_WIDE_INT hi, lo;
7976 REAL_VALUE_TYPE r;
7978 switch (DECL_FUNCTION_CODE (fndecl))
7980 CASE_FLT_FN (BUILT_IN_LFLOOR):
7981 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7982 real_floor (&r, TYPE_MODE (ftype), &x);
7983 break;
7985 CASE_FLT_FN (BUILT_IN_LCEIL):
7986 CASE_FLT_FN (BUILT_IN_LLCEIL):
7987 real_ceil (&r, TYPE_MODE (ftype), &x);
7988 break;
7990 CASE_FLT_FN (BUILT_IN_LROUND):
7991 CASE_FLT_FN (BUILT_IN_LLROUND):
7992 real_round (&r, TYPE_MODE (ftype), &x);
7993 break;
7995 default:
7996 gcc_unreachable ();
7999 REAL_VALUE_TO_INT (&lo, &hi, r);
8000 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8001 return build_int_cst_wide (itype, lo2, hi);
8005 switch (DECL_FUNCTION_CODE (fndecl))
8007 CASE_FLT_FN (BUILT_IN_LFLOOR):
8008 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8009 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8010 if (tree_expr_nonnegative_p (arg))
8011 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8012 arg);
8013 break;
8014 default:;
8017 return fold_fixed_mathfn (fndecl, arg);
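/* Illustration: lround (2.5) folds above to the integer constant 3 as
   long as the rounded value fits the return type, and lfloor (x) with X
   known to be nonnegative becomes a plain FIX_TRUNC_EXPR; everything
   else is passed on to fold_fixed_mathfn for possible narrowing.  */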
8020 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8021 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8022 the argument to the call. Return NULL_TREE if no simplification can
8023 be made. */
8025 static tree
8026 fold_builtin_bitop (tree fndecl, tree arg)
8028 if (!validate_arg (arg, INTEGER_TYPE))
8029 return NULL_TREE;
8031 /* Optimize for constant argument. */
8032 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8034 HOST_WIDE_INT hi, width, result;
8035 unsigned HOST_WIDE_INT lo;
8036 tree type;
8038 type = TREE_TYPE (arg);
8039 width = TYPE_PRECISION (type);
8040 lo = TREE_INT_CST_LOW (arg);
8042 /* Clear all the bits that are beyond the type's precision. */
8043 if (width > HOST_BITS_PER_WIDE_INT)
8045 hi = TREE_INT_CST_HIGH (arg);
8046 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8047 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8049 else
8051 hi = 0;
8052 if (width < HOST_BITS_PER_WIDE_INT)
8053 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8056 switch (DECL_FUNCTION_CODE (fndecl))
8058 CASE_INT_FN (BUILT_IN_FFS):
8059 if (lo != 0)
8060 result = exact_log2 (lo & -lo) + 1;
8061 else if (hi != 0)
8062 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8063 else
8064 result = 0;
8065 break;
8067 CASE_INT_FN (BUILT_IN_CLZ):
8068 if (hi != 0)
8069 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8070 else if (lo != 0)
8071 result = width - floor_log2 (lo) - 1;
8072 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8073 result = width;
8074 break;
8076 CASE_INT_FN (BUILT_IN_CTZ):
8077 if (lo != 0)
8078 result = exact_log2 (lo & -lo);
8079 else if (hi != 0)
8080 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8081 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8082 result = width;
8083 break;
8085 CASE_INT_FN (BUILT_IN_POPCOUNT):
8086 result = 0;
8087 while (lo)
8088 result++, lo &= lo - 1;
8089 while (hi)
8090 result++, hi &= hi - 1;
8091 break;
8093 CASE_INT_FN (BUILT_IN_PARITY):
8094 result = 0;
8095 while (lo)
8096 result++, lo &= lo - 1;
8097 while (hi)
8098 result++, hi &= hi - 1;
8099 result &= 1;
8100 break;
8102 default:
8103 gcc_unreachable ();
8106 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8109 return NULL_TREE;
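/* Illustration of the constant folding above, for a 32-bit int argument:
     __builtin_ffs (0x10)       -> 5
     __builtin_clz (0x10)       -> 27
     __builtin_ctz (0x10)       -> 4
     __builtin_popcount (0xff)  -> 8
     __builtin_parity (0x7)     -> 1  */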
8112 /* Fold function call to builtin bswap32 or bswap64. Return
8113 NULL_TREE if no simplification can be made. */
8114 static tree
8115 fold_builtin_bswap (tree fndecl, tree arg)
8117 if (! validate_arg (arg, INTEGER_TYPE))
8118 return NULL_TREE;
8120 /* Optimize constant value. */
8121 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8123 HOST_WIDE_INT hi, width, r_hi = 0;
8124 unsigned HOST_WIDE_INT lo, r_lo = 0;
8125 tree type;
8127 type = TREE_TYPE (arg);
8128 width = TYPE_PRECISION (type);
8129 lo = TREE_INT_CST_LOW (arg);
8130 hi = TREE_INT_CST_HIGH (arg);
8132 switch (DECL_FUNCTION_CODE (fndecl))
8134 case BUILT_IN_BSWAP32:
8135 case BUILT_IN_BSWAP64:
8137 int s;
8139 for (s = 0; s < width; s += 8)
8141 int d = width - s - 8;
8142 unsigned HOST_WIDE_INT byte;
8144 if (s < HOST_BITS_PER_WIDE_INT)
8145 byte = (lo >> s) & 0xff;
8146 else
8147 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8149 if (d < HOST_BITS_PER_WIDE_INT)
8150 r_lo |= byte << d;
8151 else
8152 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8156 break;
8158 default:
8159 gcc_unreachable ();
8162 if (width < HOST_BITS_PER_WIDE_INT)
8163 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8164 else
8165 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8168 return NULL_TREE;
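/* Illustration: __builtin_bswap32 (0x12345678) folds above to
   0x78563412, reversing the constant one byte at a time.  */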
8171 /* Return true if EXPR is the real constant contained in VALUE. */
8173 static bool
8174 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8176 STRIP_NOPS (expr);
8178 return ((TREE_CODE (expr) == REAL_CST
8179 && !TREE_OVERFLOW (expr)
8180 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8181 || (TREE_CODE (expr) == COMPLEX_CST
8182 && real_dconstp (TREE_REALPART (expr), value)
8183 && real_zerop (TREE_IMAGPART (expr))));
8186 /* A subroutine of fold_builtin to fold the various logarithmic
8187 functions. Return NULL_TREE if no simplification can be made.
8188 FUNC is the corresponding MPFR logarithm function. */
8190 static tree
8191 fold_builtin_logarithm (tree fndecl, tree arg,
8192 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8194 if (validate_arg (arg, REAL_TYPE))
8196 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8197 tree res;
8198 const enum built_in_function fcode = builtin_mathfn_code (arg);
8200 /* Optimize log(e) = 1.0. We're never passed an exact 'e';
8201 instead we'll look for 'e' truncated to MODE. So only do
8202 this if flag_unsafe_math_optimizations is set. */
8203 if (flag_unsafe_math_optimizations && func == mpfr_log)
8205 const REAL_VALUE_TYPE e_truncated =
8206 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
8207 if (real_dconstp (arg, &e_truncated))
8208 return build_real (type, dconst1);
8211 /* Calculate the result when the argument is a constant. */
8212 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8213 return res;
8215 /* Special case, optimize logN(expN(x)) = x. */
8216 if (flag_unsafe_math_optimizations
8217 && ((func == mpfr_log
8218 && (fcode == BUILT_IN_EXP
8219 || fcode == BUILT_IN_EXPF
8220 || fcode == BUILT_IN_EXPL))
8221 || (func == mpfr_log2
8222 && (fcode == BUILT_IN_EXP2
8223 || fcode == BUILT_IN_EXP2F
8224 || fcode == BUILT_IN_EXP2L))
8225 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8226 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8228 /* Optimize logN(func()) for various exponential functions. We
8229 want to determine the value "x" and the power "exponent" in
8230 order to transform logN(x**exponent) into exponent*logN(x). */
8231 if (flag_unsafe_math_optimizations)
8233 tree exponent = 0, x = 0;
8235 switch (fcode)
8237 CASE_FLT_FN (BUILT_IN_EXP):
8238 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8239 x = build_real (type,
8240 real_value_truncate (TYPE_MODE (type),
8241 *get_real_const (rv_e)));
8242 exponent = CALL_EXPR_ARG (arg, 0);
8243 break;
8244 CASE_FLT_FN (BUILT_IN_EXP2):
8245 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8246 x = build_real (type, dconst2);
8247 exponent = CALL_EXPR_ARG (arg, 0);
8248 break;
8249 CASE_FLT_FN (BUILT_IN_EXP10):
8250 CASE_FLT_FN (BUILT_IN_POW10):
8251 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8253 REAL_VALUE_TYPE dconst10;
8254 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8255 x = build_real (type, dconst10);
8257 exponent = CALL_EXPR_ARG (arg, 0);
8258 break;
8259 CASE_FLT_FN (BUILT_IN_SQRT):
8260 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8261 x = CALL_EXPR_ARG (arg, 0);
8262 exponent = build_real (type, dconsthalf);
8263 break;
8264 CASE_FLT_FN (BUILT_IN_CBRT):
8265 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8266 x = CALL_EXPR_ARG (arg, 0);
8267 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8268 *get_real_const (rv_third)));
8269 break;
8270 CASE_FLT_FN (BUILT_IN_POW):
8271 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8272 x = CALL_EXPR_ARG (arg, 0);
8273 exponent = CALL_EXPR_ARG (arg, 1);
8274 break;
8275 default:
8276 break;
8279 /* Now perform the optimization. */
8280 if (x && exponent)
8282 tree logfn = build_call_expr (fndecl, 1, x);
8283 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8288 return NULL_TREE;
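/* Illustration of the logarithm folds above (all of them require
   -funsafe-math-optimizations; constant arguments are instead evaluated
   via MPFR):
     log (exp (x))     -> x
     log2 (exp2 (x))   -> x
     log (sqrt (x))    -> 0.5 * log (x)
     log (pow (x, y))  -> y * log (x)
     log (exp2 (x))    -> x * log (2.0)  */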
8291 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8292 NULL_TREE if no simplification can be made. */
8294 static tree
8295 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8297 tree res, narg0, narg1;
8299 if (!validate_arg (arg0, REAL_TYPE)
8300 || !validate_arg (arg1, REAL_TYPE))
8301 return NULL_TREE;
8303 /* Calculate the result when the argument is a constant. */
8304 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8305 return res;
8307 /* If either argument to hypot has a negate or abs, strip that off.
8308 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8309 narg0 = fold_strip_sign_ops (arg0);
8310 narg1 = fold_strip_sign_ops (arg1);
8311 if (narg0 || narg1)
8313 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8314 narg1 ? narg1 : arg1);
8317 /* If either argument is zero, hypot is fabs of the other. */
8318 if (real_zerop (arg0))
8319 return fold_build1 (ABS_EXPR, type, arg1);
8320 else if (real_zerop (arg1))
8321 return fold_build1 (ABS_EXPR, type, arg0);
8323 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8324 if (flag_unsafe_math_optimizations
8325 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8327 const REAL_VALUE_TYPE sqrt2_trunc
8328 = real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
8329 return fold_build2 (MULT_EXPR, type,
8330 fold_build1 (ABS_EXPR, type, arg0),
8331 build_real (type, sqrt2_trunc));
8334 return NULL_TREE;
8338 /* Fold a builtin function call to pow, powf, or powl. Return
8339 NULL_TREE if no simplification can be made. */
8340 static tree
8341 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8343 tree res;
8345 if (!validate_arg (arg0, REAL_TYPE)
8346 || !validate_arg (arg1, REAL_TYPE))
8347 return NULL_TREE;
8349 /* Calculate the result when the argument is a constant. */
8350 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8351 return res;
8353 /* Optimize pow(1.0,y) = 1.0. */
8354 if (real_onep (arg0))
8355 return omit_one_operand (type, build_real (type, dconst1), arg1);
8357 if (TREE_CODE (arg1) == REAL_CST
8358 && !TREE_OVERFLOW (arg1))
8360 REAL_VALUE_TYPE cint;
8361 REAL_VALUE_TYPE c;
8362 HOST_WIDE_INT n;
8364 c = TREE_REAL_CST (arg1);
8366 /* Optimize pow(x,0.0) = 1.0. */
8367 if (REAL_VALUES_EQUAL (c, dconst0))
8368 return omit_one_operand (type, build_real (type, dconst1),
8369 arg0);
8371 /* Optimize pow(x,1.0) = x. */
8372 if (REAL_VALUES_EQUAL (c, dconst1))
8373 return arg0;
8375 /* Optimize pow(x,-1.0) = 1.0/x. */
8376 if (REAL_VALUES_EQUAL (c, dconstm1))
8377 return fold_build2 (RDIV_EXPR, type,
8378 build_real (type, dconst1), arg0);
8380 /* Optimize pow(x,0.5) = sqrt(x). */
8381 if (flag_unsafe_math_optimizations
8382 && REAL_VALUES_EQUAL (c, dconsthalf))
8384 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8386 if (sqrtfn != NULL_TREE)
8387 return build_call_expr (sqrtfn, 1, arg0);
8390 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8391 if (flag_unsafe_math_optimizations)
8393 const REAL_VALUE_TYPE dconstroot
8394 = real_value_truncate (TYPE_MODE (type),
8395 *get_real_const (rv_third));
8397 if (REAL_VALUES_EQUAL (c, dconstroot))
8399 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8400 if (cbrtfn != NULL_TREE)
8401 return build_call_expr (cbrtfn, 1, arg0);
8405 /* Check for an integer exponent. */
8406 n = real_to_integer (&c);
8407 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8408 if (real_identical (&c, &cint))
8410 /* Attempt to evaluate pow at compile-time. */
8411 if (TREE_CODE (arg0) == REAL_CST
8412 && !TREE_OVERFLOW (arg0))
8414 REAL_VALUE_TYPE x;
8415 bool inexact;
8417 x = TREE_REAL_CST (arg0);
8418 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8419 if (flag_unsafe_math_optimizations || !inexact)
8420 return build_real (type, x);
8423 /* Strip sign ops from even integer powers. */
8424 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8426 tree narg0 = fold_strip_sign_ops (arg0);
8427 if (narg0)
8428 return build_call_expr (fndecl, 2, narg0, arg1);
8433 if (flag_unsafe_math_optimizations)
8435 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8437 /* Optimize pow(expN(x),y) = expN(x*y). */
8438 if (BUILTIN_EXPONENT_P (fcode))
8440 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8441 tree arg = CALL_EXPR_ARG (arg0, 0);
8442 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8443 return build_call_expr (expfn, 1, arg);
8446 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8447 if (BUILTIN_SQRT_P (fcode))
8449 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8450 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8451 build_real (type, dconsthalf));
8452 return build_call_expr (fndecl, 2, narg0, narg1);
8455 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8456 if (BUILTIN_CBRT_P (fcode))
8458 tree arg = CALL_EXPR_ARG (arg0, 0);
8459 if (tree_expr_nonnegative_p (arg))
8461 const REAL_VALUE_TYPE dconstroot
8462 = real_value_truncate (TYPE_MODE (type),
8463 *get_real_const (rv_third));
8464 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8465 build_real (type, dconstroot));
8466 return build_call_expr (fndecl, 2, arg, narg1);
8470 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8471 if (fcode == BUILT_IN_POW
8472 || fcode == BUILT_IN_POWF
8473 || fcode == BUILT_IN_POWL)
8475 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8476 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8477 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8478 return build_call_expr (fndecl, 2, arg00, narg1);
8482 return NULL_TREE;
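/* Illustration of the pow folds above:
     pow (x, 0.0)   -> 1.0             pow (x, 1.0)  -> x
     pow (x, -1.0)  -> 1.0 / x         pow (x, 0.5)  -> sqrt (x)  (unsafe)
     pow (exp (x), y)    -> exp (x * y)                           (unsafe)
     pow (pow (x, y), z) -> pow (x, y * z)                        (unsafe)
   and a call with two constant operands is evaluated at compile time.  */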
8485 /* Fold a builtin function call to powi, powif, or powil with arguments
8486 ARG0 and ARG1. Return NULL_TREE if no simplification can be made. */
8487 static tree
8488 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8489 tree arg0, tree arg1, tree type)
8491 if (!validate_arg (arg0, REAL_TYPE)
8492 || !validate_arg (arg1, INTEGER_TYPE))
8493 return NULL_TREE;
8495 /* Optimize pow(1.0,y) = 1.0. */
8496 if (real_onep (arg0))
8497 return omit_one_operand (type, build_real (type, dconst1), arg1);
8499 if (host_integerp (arg1, 0))
8501 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8503 /* Evaluate powi at compile-time. */
8504 if (TREE_CODE (arg0) == REAL_CST
8505 && !TREE_OVERFLOW (arg0))
8507 REAL_VALUE_TYPE x;
8508 x = TREE_REAL_CST (arg0);
8509 real_powi (&x, TYPE_MODE (type), &x, c);
8510 return build_real (type, x);
8513 /* Optimize pow(x,0) = 1.0. */
8514 if (c == 0)
8515 return omit_one_operand (type, build_real (type, dconst1),
8516 arg0);
8518 /* Optimize pow(x,1) = x. */
8519 if (c == 1)
8520 return arg0;
8522 /* Optimize pow(x,-1) = 1.0/x. */
8523 if (c == -1)
8524 return fold_build2 (RDIV_EXPR, type,
8525 build_real (type, dconst1), arg0);
8528 return NULL_TREE;
8531 /* A subroutine of fold_builtin to fold the various exponent
8532 functions. Return NULL_TREE if no simplification can be made.
8533 FUNC is the corresponding MPFR exponent function. */
8535 static tree
8536 fold_builtin_exponent (tree fndecl, tree arg,
8537 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8539 if (validate_arg (arg, REAL_TYPE))
8541 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8542 tree res;
8544 /* Calculate the result when the argument is a constant. */
8545 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8546 return res;
8548 /* Optimize expN(logN(x)) = x. */
8549 if (flag_unsafe_math_optimizations)
8551 const enum built_in_function fcode = builtin_mathfn_code (arg);
8553 if ((func == mpfr_exp
8554 && (fcode == BUILT_IN_LOG
8555 || fcode == BUILT_IN_LOGF
8556 || fcode == BUILT_IN_LOGL))
8557 || (func == mpfr_exp2
8558 && (fcode == BUILT_IN_LOG2
8559 || fcode == BUILT_IN_LOG2F
8560 || fcode == BUILT_IN_LOG2L))
8561 || (func == mpfr_exp10
8562 && (fcode == BUILT_IN_LOG10
8563 || fcode == BUILT_IN_LOG10F
8564 || fcode == BUILT_IN_LOG10L)))
8565 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8569 return NULL_TREE;
8572 /* Return true if VAR is a VAR_DECL or a component thereof. */
8574 static bool
8575 var_decl_component_p (tree var)
8577 tree inner = var;
8578 while (handled_component_p (inner))
8579 inner = TREE_OPERAND (inner, 0);
8580 return SSA_VAR_P (inner);
8583 /* Fold function call to builtin memset. Return
8584 NULL_TREE if no simplification can be made. */
8586 static tree
8587 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8589 tree var, ret;
8590 unsigned HOST_WIDE_INT length, cval;
8592 if (! validate_arg (dest, POINTER_TYPE)
8593 || ! validate_arg (c, INTEGER_TYPE)
8594 || ! validate_arg (len, INTEGER_TYPE))
8595 return NULL_TREE;
8597 if (! host_integerp (len, 1))
8598 return NULL_TREE;
8600 /* If the LEN parameter is zero, return DEST. */
8601 if (integer_zerop (len))
8602 return omit_one_operand (type, dest, c);
8604 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8605 return NULL_TREE;
8607 var = dest;
8608 STRIP_NOPS (var);
8609 if (TREE_CODE (var) != ADDR_EXPR)
8610 return NULL_TREE;
8612 var = TREE_OPERAND (var, 0);
8613 if (TREE_THIS_VOLATILE (var))
8614 return NULL_TREE;
8616 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8617 && !POINTER_TYPE_P (TREE_TYPE (var)))
8618 return NULL_TREE;
8620 if (! var_decl_component_p (var))
8621 return NULL_TREE;
8623 length = tree_low_cst (len, 1);
8624 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8625 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8626 < (int) length)
8627 return NULL_TREE;
8629 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8630 return NULL_TREE;
8632 if (integer_zerop (c))
8633 cval = 0;
8634 else
8636 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8637 return NULL_TREE;
8639 cval = tree_low_cst (c, 1);
8640 cval &= 0xff;
8641 cval |= cval << 8;
8642 cval |= cval << 16;
8643 cval |= (cval << 31) << 1;
8646 ret = build_int_cst_type (TREE_TYPE (var), cval);
8647 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8648 if (ignore)
8649 return ret;
8651 return omit_one_operand (type, dest, ret);
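/* Illustration: given "int i;", a call memset (&i, 0x2a, sizeof (int))
   can be folded above into, roughly, the single store i = 0x2a2a2a2a,
   since the length matches the mode size of the suitably aligned
   destination and the fill byte is replicated across the word.  */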
8654 /* Fold function call to builtin bzero. Return
8655 NULL_TREE if no simplification can be made. */
8657 static tree
8658 fold_builtin_bzero (tree dest, tree size, bool ignore)
8660 if (! validate_arg (dest, POINTER_TYPE)
8661 || ! validate_arg (size, INTEGER_TYPE))
8662 return NULL_TREE;
8664 if (!ignore)
8665 return NULL_TREE;
8667 /* New argument list transforming bzero(ptr x, int y) to
8668 memset(ptr x, int 0, size_t y). This is done this way
8669 so that if it isn't expanded inline, we fall back to
8670 calling bzero instead of memset. */
8672 return fold_builtin_memset (dest, integer_zero_node,
8673 fold_convert (sizetype, size),
8674 void_type_node, ignore);
8677 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8678 NULL_TREE if no simplification can be made.
8679 If ENDP is 0, return DEST (like memcpy).
8680 If ENDP is 1, return DEST+LEN (like mempcpy).
8681 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8682 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8683 (memmove). */
8685 static tree
8686 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8688 tree destvar, srcvar, expr;
8690 if (! validate_arg (dest, POINTER_TYPE)
8691 || ! validate_arg (src, POINTER_TYPE)
8692 || ! validate_arg (len, INTEGER_TYPE))
8693 return NULL_TREE;
8695 /* If the LEN parameter is zero, return DEST. */
8696 if (integer_zerop (len))
8697 return omit_one_operand (type, dest, src);
8699 /* If SRC and DEST are the same (and not volatile), return
8700 DEST{,+LEN,+LEN-1}. */
8701 if (operand_equal_p (src, dest, 0))
8702 expr = len;
8703 else
8705 tree srctype, desttype;
8706 if (endp == 3)
8708 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8709 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8711 /* Both DEST and SRC must be pointer types.
8712 ??? This is what old code did. Is the testing for pointer types
8713 really mandatory?
8715 If either SRC is readonly or length is 1, we can use memcpy. */
8716 if (dest_align && src_align
8717 && (readonly_data_expr (src)
8718 || (host_integerp (len, 1)
8719 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8720 tree_low_cst (len, 1)))))
8722 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8723 if (!fn)
8724 return NULL_TREE;
8725 return build_call_expr (fn, 3, dest, src, len);
8727 return NULL_TREE;
8730 if (!host_integerp (len, 0))
8731 return NULL_TREE;
8732 /* FIXME:
8733 This logic loses for arguments like (type *)malloc (sizeof (type)),
8734 since we strip the casts up to the VOID return value from malloc.
8735 Perhaps we ought to inherit the type from the non-VOID argument here? */
8736 STRIP_NOPS (src);
8737 STRIP_NOPS (dest);
8738 srctype = TREE_TYPE (TREE_TYPE (src));
8739 desttype = TREE_TYPE (TREE_TYPE (dest));
8740 if (!srctype || !desttype
8741 || !TYPE_SIZE_UNIT (srctype)
8742 || !TYPE_SIZE_UNIT (desttype)
8743 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8744 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8745 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8746 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8747 return NULL_TREE;
8749 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8750 < (int) TYPE_ALIGN (desttype)
8751 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8752 < (int) TYPE_ALIGN (srctype)))
8753 return NULL_TREE;
8755 if (!ignore)
8756 dest = builtin_save_expr (dest);
8758 srcvar = build_fold_indirect_ref (src);
8759 if (TREE_THIS_VOLATILE (srcvar))
8760 return NULL_TREE;
8761 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8762 return NULL_TREE;
8763 /* With memcpy, it is possible to bypass aliasing rules, so without
8764 this check e.g. execute/20060930-2.c would be misoptimized, because
8765 it uses a conflicting alias set to hold the argument for the memcpy call.
8766 This check is probably unnecessary with -fno-strict-aliasing.
8767 Similarly for destvar. See also PR29286. */
8768 if (!var_decl_component_p (srcvar)
8769 /* Accept: memcpy (*char_var, "test", 1); that simplifies
8770 to char_var='t'. */
8771 || is_gimple_min_invariant (srcvar)
8772 || readonly_data_expr (src))
8773 return NULL_TREE;
8775 destvar = build_fold_indirect_ref (dest);
8776 if (TREE_THIS_VOLATILE (destvar))
8777 return NULL_TREE;
8778 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8779 return NULL_TREE;
8780 if (!var_decl_component_p (destvar))
8781 return NULL_TREE;
8783 if (srctype == desttype
8784 || (gimple_in_ssa_p (cfun)
8785 && useless_type_conversion_p (desttype, srctype)))
8786 expr = srcvar;
8787 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8788 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8789 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8790 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8791 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8792 else
8793 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8794 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8797 if (ignore)
8798 return expr;
8800 if (endp == 0 || endp == 3)
8801 return omit_one_operand (type, dest, expr);
8803 if (expr == len)
8804 expr = NULL_TREE;
8806 if (endp == 2)
8807 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8808 ssize_int (1));
8810 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8811 dest = fold_convert (type, dest);
8812 if (expr)
8813 dest = omit_one_operand (type, dest, expr);
8814 return dest;
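/* Illustration: with "struct S a, b;", the call
     memcpy (&a, &b, sizeof (struct S))
   can be folded above to the aggregate assignment a = b, with DEST
   (or DEST+LEN for the mempcpy/stpcpy forms) still produced as the
   result when the return value is used.  */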
8817 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8818 If LEN is not NULL, it represents the length of the string to be
8819 copied. Return NULL_TREE if no simplification can be made. */
8821 tree
8822 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8824 tree fn;
8826 if (!validate_arg (dest, POINTER_TYPE)
8827 || !validate_arg (src, POINTER_TYPE))
8828 return NULL_TREE;
8830 /* If SRC and DEST are the same (and not volatile), return DEST. */
8831 if (operand_equal_p (src, dest, 0))
8832 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8834 if (optimize_size)
8835 return NULL_TREE;
8837 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8838 if (!fn)
8839 return NULL_TREE;
8841 if (!len)
8843 len = c_strlen (src, 1);
8844 if (! len || TREE_SIDE_EFFECTS (len))
8845 return NULL_TREE;
8848 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8849 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8850 build_call_expr (fn, 3, dest, src, len));
8853 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8854 If SLEN is not NULL, it represents the length of the source string.
8855 Return NULL_TREE if no simplification can be made. */
8857 tree
8858 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8860 tree fn;
8862 if (!validate_arg (dest, POINTER_TYPE)
8863 || !validate_arg (src, POINTER_TYPE)
8864 || !validate_arg (len, INTEGER_TYPE))
8865 return NULL_TREE;
8867 /* If the LEN parameter is zero, return DEST. */
8868 if (integer_zerop (len))
8869 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8871 /* We can't compare slen with len as constants below if len is not a
8872 constant. */
8873 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8874 return NULL_TREE;
8876 if (!slen)
8877 slen = c_strlen (src, 1);
8879 /* Now, we must be passed a constant src ptr parameter. */
8880 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8881 return NULL_TREE;
8883 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8885 /* We do not support simplification of this case, though we do
8886 support it when expanding trees into RTL. */
8887 /* FIXME: generate a call to __builtin_memset. */
8888 if (tree_int_cst_lt (slen, len))
8889 return NULL_TREE;
8891 /* OK, transform into builtin memcpy. */
8892 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8893 if (!fn)
8894 return NULL_TREE;
8895 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8896 build_call_expr (fn, 3, dest, src, len));
8899 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8900 arguments to the call, and TYPE is its return type.
8901 Return NULL_TREE if no simplification can be made. */
8903 static tree
8904 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8906 if (!validate_arg (arg1, POINTER_TYPE)
8907 || !validate_arg (arg2, INTEGER_TYPE)
8908 || !validate_arg (len, INTEGER_TYPE))
8909 return NULL_TREE;
8910 else
8912 const char *p1;
8914 if (TREE_CODE (arg2) != INTEGER_CST
8915 || !host_integerp (len, 1))
8916 return NULL_TREE;
8918 p1 = c_getstr (arg1);
8919 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8921 char c;
8922 const char *r;
8923 tree tem;
8925 if (target_char_cast (arg2, &c))
8926 return NULL_TREE;
8928 r = memchr (p1, c, tree_low_cst (len, 1));
8930 if (r == NULL)
8931 return build_int_cst (TREE_TYPE (arg1), 0);
8933 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8934 size_int (r - p1));
8935 return fold_convert (type, tem);
8937 return NULL_TREE;
8941 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8942 Return NULL_TREE if no simplification can be made. */
8944 static tree
8945 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8947 const char *p1, *p2;
8949 if (!validate_arg (arg1, POINTER_TYPE)
8950 || !validate_arg (arg2, POINTER_TYPE)
8951 || !validate_arg (len, INTEGER_TYPE))
8952 return NULL_TREE;
8954 /* If the LEN parameter is zero, return zero. */
8955 if (integer_zerop (len))
8956 return omit_two_operands (integer_type_node, integer_zero_node,
8957 arg1, arg2);
8959 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8960 if (operand_equal_p (arg1, arg2, 0))
8961 return omit_one_operand (integer_type_node, integer_zero_node, len);
8963 p1 = c_getstr (arg1);
8964 p2 = c_getstr (arg2);
8966 /* If all arguments are constant, and the value of len is not greater
8967 than the lengths of arg1 and arg2, evaluate at compile-time. */
8968 if (host_integerp (len, 1) && p1 && p2
8969 && compare_tree_int (len, strlen (p1) + 1) <= 0
8970 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8972 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8974 if (r > 0)
8975 return integer_one_node;
8976 else if (r < 0)
8977 return integer_minus_one_node;
8978 else
8979 return integer_zero_node;
8982 /* If the len parameter is one, return an expression corresponding to
8983 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8984 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8986 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8987 tree cst_uchar_ptr_node
8988 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8990 tree ind1 = fold_convert (integer_type_node,
8991 build1 (INDIRECT_REF, cst_uchar_node,
8992 fold_convert (cst_uchar_ptr_node,
8993 arg1)));
8994 tree ind2 = fold_convert (integer_type_node,
8995 build1 (INDIRECT_REF, cst_uchar_node,
8996 fold_convert (cst_uchar_ptr_node,
8997 arg2)));
8998 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9001 return NULL_TREE;
9004 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9005 Return NULL_TREE if no simplification can be made. */
9007 static tree
9008 fold_builtin_strcmp (tree arg1, tree arg2)
9010 const char *p1, *p2;
9012 if (!validate_arg (arg1, POINTER_TYPE)
9013 || !validate_arg (arg2, POINTER_TYPE))
9014 return NULL_TREE;
9016 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9017 if (operand_equal_p (arg1, arg2, 0))
9018 return integer_zero_node;
9020 p1 = c_getstr (arg1);
9021 p2 = c_getstr (arg2);
9023 if (p1 && p2)
9025 const int i = strcmp (p1, p2);
9026 if (i < 0)
9027 return integer_minus_one_node;
9028 else if (i > 0)
9029 return integer_one_node;
9030 else
9031 return integer_zero_node;
9034 /* If the second arg is "", return *(const unsigned char*)arg1. */
9035 if (p2 && *p2 == '\0')
9037 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9038 tree cst_uchar_ptr_node
9039 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9041 return fold_convert (integer_type_node,
9042 build1 (INDIRECT_REF, cst_uchar_node,
9043 fold_convert (cst_uchar_ptr_node,
9044 arg1)));
9047 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9048 if (p1 && *p1 == '\0')
9050 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9051 tree cst_uchar_ptr_node
9052 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9054 tree temp = fold_convert (integer_type_node,
9055 build1 (INDIRECT_REF, cst_uchar_node,
9056 fold_convert (cst_uchar_ptr_node,
9057 arg2)));
9058 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9061 return NULL_TREE;
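/* Illustration: strcmp ("a", "b") folds above to -1, strcmp (s, s)
   to 0, and strcmp (s, "") to *(const unsigned char *) s.  */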
9064 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9065 Return NULL_TREE if no simplification can be made. */
9067 static tree
9068 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9070 const char *p1, *p2;
9072 if (!validate_arg (arg1, POINTER_TYPE)
9073 || !validate_arg (arg2, POINTER_TYPE)
9074 || !validate_arg (len, INTEGER_TYPE))
9075 return NULL_TREE;
9077 /* If the LEN parameter is zero, return zero. */
9078 if (integer_zerop (len))
9079 return omit_two_operands (integer_type_node, integer_zero_node,
9080 arg1, arg2);
9082 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9083 if (operand_equal_p (arg1, arg2, 0))
9084 return omit_one_operand (integer_type_node, integer_zero_node, len);
9086 p1 = c_getstr (arg1);
9087 p2 = c_getstr (arg2);
9089 if (host_integerp (len, 1) && p1 && p2)
9091 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9092 if (i > 0)
9093 return integer_one_node;
9094 else if (i < 0)
9095 return integer_minus_one_node;
9096 else
9097 return integer_zero_node;
9100 /* If the second arg is "", and the length is greater than zero,
9101 return *(const unsigned char*)arg1. */
9102 if (p2 && *p2 == '\0'
9103 && TREE_CODE (len) == INTEGER_CST
9104 && tree_int_cst_sgn (len) == 1)
9106 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9107 tree cst_uchar_ptr_node
9108 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9110 return fold_convert (integer_type_node,
9111 build1 (INDIRECT_REF, cst_uchar_node,
9112 fold_convert (cst_uchar_ptr_node,
9113 arg1)));
9116 /* If the first arg is "", and the length is greater than zero,
9117 return -*(const unsigned char*)arg2. */
9118 if (p1 && *p1 == '\0'
9119 && TREE_CODE (len) == INTEGER_CST
9120 && tree_int_cst_sgn (len) == 1)
9122 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9123 tree cst_uchar_ptr_node
9124 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9126 tree temp = fold_convert (integer_type_node,
9127 build1 (INDIRECT_REF, cst_uchar_node,
9128 fold_convert (cst_uchar_ptr_node,
9129 arg2)));
9130 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9133 /* If the len parameter is one, return an expression corresponding to
9134 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9135 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9137 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9138 tree cst_uchar_ptr_node
9139 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9141 tree ind1 = fold_convert (integer_type_node,
9142 build1 (INDIRECT_REF, cst_uchar_node,
9143 fold_convert (cst_uchar_ptr_node,
9144 arg1)));
9145 tree ind2 = fold_convert (integer_type_node,
9146 build1 (INDIRECT_REF, cst_uchar_node,
9147 fold_convert (cst_uchar_ptr_node,
9148 arg2)));
9149 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9152 return NULL_TREE;
9155 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9156 ARG. Return NULL_TREE if no simplification can be made. */
9158 static tree
9159 fold_builtin_signbit (tree arg, tree type)
9161 tree temp;
9163 if (!validate_arg (arg, REAL_TYPE))
9164 return NULL_TREE;
9166 /* If ARG is a compile-time constant, determine the result. */
9167 if (TREE_CODE (arg) == REAL_CST
9168 && !TREE_OVERFLOW (arg))
9170 REAL_VALUE_TYPE c;
9172 c = TREE_REAL_CST (arg);
9173 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9174 return fold_convert (type, temp);
9177 /* If ARG is non-negative, the result is always zero. */
9178 if (tree_expr_nonnegative_p (arg))
9179 return omit_one_operand (type, integer_zero_node, arg);
9181 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9182 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9183 return fold_build2 (LT_EXPR, type, arg,
9184 build_real (TREE_TYPE (arg), dconst0));
9186 return NULL_TREE;
9189 /* Fold function call to builtin copysign, copysignf or copysignl with
9190 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9191 be made. */
9193 static tree
9194 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9196 tree tem;
9198 if (!validate_arg (arg1, REAL_TYPE)
9199 || !validate_arg (arg2, REAL_TYPE))
9200 return NULL_TREE;
9202 /* copysign(X,X) is X. */
9203 if (operand_equal_p (arg1, arg2, 0))
9204 return fold_convert (type, arg1);
9206 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9207 if (TREE_CODE (arg1) == REAL_CST
9208 && TREE_CODE (arg2) == REAL_CST
9209 && !TREE_OVERFLOW (arg1)
9210 && !TREE_OVERFLOW (arg2))
9212 REAL_VALUE_TYPE c1, c2;
9214 c1 = TREE_REAL_CST (arg1);
9215 c2 = TREE_REAL_CST (arg2);
9216 /* c1.sign := c2.sign. */
9217 real_copysign (&c1, &c2);
9218 return build_real (type, c1);
9221 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9222 Remember to evaluate Y for side-effects. */
9223 if (tree_expr_nonnegative_p (arg2))
9224 return omit_one_operand (type,
9225 fold_build1 (ABS_EXPR, type, arg1),
9226 arg2);
9228 /* Strip sign changing operations for the first argument. */
9229 tem = fold_strip_sign_ops (arg1);
9230 if (tem)
9231 return build_call_expr (fndecl, 2, tem, arg2);
9233 return NULL_TREE;
9236 /* Fold a call to builtin isascii with argument ARG. */
9238 static tree
9239 fold_builtin_isascii (tree arg)
9241 if (!validate_arg (arg, INTEGER_TYPE))
9242 return NULL_TREE;
9243 else
9245 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9246 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9247 build_int_cst (NULL_TREE,
9248 ~ (unsigned HOST_WIDE_INT) 0x7f));
9249 return fold_build2 (EQ_EXPR, integer_type_node,
9250 arg, integer_zero_node);
9254 /* Fold a call to builtin toascii with argument ARG. */
9256 static tree
9257 fold_builtin_toascii (tree arg)
9259 if (!validate_arg (arg, INTEGER_TYPE))
9260 return NULL_TREE;
9262 /* Transform toascii(c) -> (c & 0x7f). */
9263 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9264 build_int_cst (NULL_TREE, 0x7f));
9267 /* Fold a call to builtin isdigit with argument ARG. */
9269 static tree
9270 fold_builtin_isdigit (tree arg)
9272 if (!validate_arg (arg, INTEGER_TYPE))
9273 return NULL_TREE;
9274 else
9276 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9277 /* According to the C standard, isdigit is unaffected by locale.
9278 However, it definitely is affected by the target character set. */
9279 unsigned HOST_WIDE_INT target_digit0
9280 = lang_hooks.to_target_charset ('0');
9282 if (target_digit0 == 0)
9283 return NULL_TREE;
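/* Note: the cast to unsigned below folds the two range checks
   c >= target_digit0 && c <= target_digit0 + 9 into a single unsigned
   comparison, since values below target_digit0 wrap around to large
   unsigned values after the subtraction.  */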
9285 arg = fold_convert (unsigned_type_node, arg);
9286 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9287 build_int_cst (unsigned_type_node, target_digit0));
9288 return fold_build2 (LE_EXPR, integer_type_node, arg,
9289 build_int_cst (unsigned_type_node, 9));
9293 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9295 static tree
9296 fold_builtin_fabs (tree arg, tree type)
9298 if (!validate_arg (arg, REAL_TYPE))
9299 return NULL_TREE;
9301 arg = fold_convert (type, arg);
9302 if (TREE_CODE (arg) == REAL_CST)
9303 return fold_abs_const (arg, type);
9304 return fold_build1 (ABS_EXPR, type, arg);
9307 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9309 static tree
9310 fold_builtin_abs (tree arg, tree type)
9312 if (!validate_arg (arg, INTEGER_TYPE))
9313 return NULL_TREE;
9315 arg = fold_convert (type, arg);
9316 if (TREE_CODE (arg) == INTEGER_CST)
9317 return fold_abs_const (arg, type);
9318 return fold_build1 (ABS_EXPR, type, arg);
9321 /* Fold a call to builtin fmin or fmax. */
9323 static tree
9324 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9326 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9328 /* Calculate the result when the argument is a constant. */
9329 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9331 if (res)
9332 return res;
9334 /* If either argument is NaN, return the other one. Avoid the
9335 transformation if we get (and honor) a signalling NaN. Using
9336 omit_one_operand() ensures we create a non-lvalue. */
9337 if (TREE_CODE (arg0) == REAL_CST
9338 && real_isnan (&TREE_REAL_CST (arg0))
9339 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9340 || ! TREE_REAL_CST (arg0).signalling))
9341 return omit_one_operand (type, arg1, arg0);
9342 if (TREE_CODE (arg1) == REAL_CST
9343 && real_isnan (&TREE_REAL_CST (arg1))
9344 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9345 || ! TREE_REAL_CST (arg1).signalling))
9346 return omit_one_operand (type, arg0, arg1);
9348 /* Transform fmin/fmax(x,x) -> x. */
9349 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9350 return omit_one_operand (type, arg0, arg1);
9352 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9353 functions to return the numeric arg if the other one is NaN.
9354 These tree codes don't honor that, so only transform if
9355 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9356 handled, so we don't have to worry about it either. */
9357 if (flag_finite_math_only)
9358 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9359 fold_convert (type, arg0),
9360 fold_convert (type, arg1));
9362 return NULL_TREE;
9365 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9367 static tree
9368 fold_builtin_carg (tree arg, tree type)
9370 if (validate_arg (arg, COMPLEX_TYPE))
9372 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9374 if (atan2_fn)
9376 tree new_arg = builtin_save_expr (arg);
9377 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9378 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9379 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9383 return NULL_TREE;
9386 /* Fold a call to builtin logb/ilogb. */
9388 static tree
9389 fold_builtin_logb (tree arg, tree rettype)
9391 if (! validate_arg (arg, REAL_TYPE))
9392 return NULL_TREE;
9394 STRIP_NOPS (arg);
9396 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9398 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9400 switch (value->cl)
9402 case rvc_nan:
9403 case rvc_inf:
9404 /* If arg is Inf or NaN and we're logb, return it. */
9405 if (TREE_CODE (rettype) == REAL_TYPE)
9406 return fold_convert (rettype, arg);
9407 /* Fall through... */
9408 case rvc_zero:
9409 /* Zero may set errno and/or raise an exception for logb; also,
9410 for ilogb we don't know FP_ILOGB0. */
9411 return NULL_TREE;
9412 case rvc_normal:
9413 /* For normal numbers, proceed iff radix == 2. In GCC,
9414 normalized significands are in the range [0.5, 1.0). We
9415 want the exponent as if they were [1.0, 2.0) so get the
9416 exponent and subtract 1. */
9417 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9418 return fold_convert (rettype, build_int_cst (NULL_TREE,
9419 REAL_EXP (value)-1));
9420 break;
9424 return NULL_TREE;
9427 /* Fold a call to builtin significand, if radix == 2. */
9429 static tree
9430 fold_builtin_significand (tree arg, tree rettype)
9432 if (! validate_arg (arg, REAL_TYPE))
9433 return NULL_TREE;
9435 STRIP_NOPS (arg);
9437 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9439 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9441 switch (value->cl)
9443 case rvc_zero:
9444 case rvc_nan:
9445 case rvc_inf:
9446 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9447 return fold_convert (rettype, arg);
9448 case rvc_normal:
9449 /* For normal numbers, proceed iff radix == 2. */
9450 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9452 REAL_VALUE_TYPE result = *value;
9453 /* In GCC, normalized significands are in the range [0.5,
9454 1.0). We want them to be [1.0, 2.0) so set the
9455 exponent to 1. */
9456 SET_REAL_EXP (&result, 1);
9457 return build_real (rettype, result);
9459 break;
9463 return NULL_TREE;
9466 /* Fold a call to builtin frexp. We can assume the base is 2. */
9468 static tree
9469 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9471 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9472 return NULL_TREE;
9474 STRIP_NOPS (arg0);
9476 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9477 return NULL_TREE;
9479 arg1 = build_fold_indirect_ref (arg1);
9481 /* Proceed if a valid pointer type was passed in. */
9482 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9484 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9485 tree frac, exp;
9487 switch (value->cl)
9489 case rvc_zero:
9490 /* For +-0, return (*exp = 0, +-0). */
9491 exp = integer_zero_node;
9492 frac = arg0;
9493 break;
9494 case rvc_nan:
9495 case rvc_inf:
9496 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9497 return omit_one_operand (rettype, arg0, arg1);
9498 case rvc_normal:
9500 /* Since the frexp function always expects base 2, and in
9501 GCC normalized significands are already in the range
9502 [0.5, 1.0), we have exactly what frexp wants. */
9503 REAL_VALUE_TYPE frac_rvt = *value;
9504 SET_REAL_EXP (&frac_rvt, 0);
9505 frac = build_real (rettype, frac_rvt);
9506 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9508 break;
9509 default:
9510 gcc_unreachable ();
9513 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9514 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9515 TREE_SIDE_EFFECTS (arg1) = 1;
9516 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9519 return NULL_TREE;
9522 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9523 then we can assume the base is two. If it's false, then we have to
9524 check the mode of the TYPE parameter in certain cases. */
9526 static tree
9527 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9529 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9531 STRIP_NOPS (arg0);
9532 STRIP_NOPS (arg1);
9534 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9535 if (real_zerop (arg0) || integer_zerop (arg1)
9536 || (TREE_CODE (arg0) == REAL_CST
9537 && !real_isfinite (&TREE_REAL_CST (arg0))))
9538 return omit_one_operand (type, arg0, arg1);
9540 /* If both arguments are constant, then try to evaluate it. */
9541 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9542 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9543 && host_integerp (arg1, 0))
9545 /* Bound the maximum adjustment to twice the range of the
9546 mode's valid exponents. Use abs to ensure the range is
9547 positive as a sanity check. */
9548 const long max_exp_adj = 2 *
9549 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9550 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9552 /* Get the user-requested adjustment. */
9553 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9555 /* The requested adjustment must be inside this range. This
9556 is a preliminary cap to avoid things like overflow; we
9557 may still fail to compute the result for other reasons. */
9558 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9560 REAL_VALUE_TYPE initial_result;
9562 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9564 /* Ensure we didn't overflow. */
9565 if (! real_isinf (&initial_result))
9567 const REAL_VALUE_TYPE trunc_result
9568 = real_value_truncate (TYPE_MODE (type), initial_result);
9570 /* Only proceed if the target mode can hold the
9571 resulting value. */
9572 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9573 return build_real (type, trunc_result);
9579 return NULL_TREE;
9582 /* Fold a call to builtin modf. */
9584 static tree
9585 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9587 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9588 return NULL_TREE;
9590 STRIP_NOPS (arg0);
9592 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9593 return NULL_TREE;
9595 arg1 = build_fold_indirect_ref (arg1);
9597 /* Proceed if a valid pointer type was passed in. */
9598 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9600 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9601 REAL_VALUE_TYPE trunc, frac;
9603 switch (value->cl)
9605 case rvc_nan:
9606 case rvc_zero:
9607 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9608 trunc = frac = *value;
9609 break;
9610 case rvc_inf:
9611 /* For +-Inf, return (*arg1 = arg0, +-0). */
9612 frac = dconst0;
9613 frac.sign = value->sign;
9614 trunc = *value;
9615 break;
9616 case rvc_normal:
9617 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9618 real_trunc (&trunc, VOIDmode, value);
9619 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9620 /* If the original number was negative and already
9621 integral, then the fractional part is -0.0. */
9622 if (value->sign && frac.cl == rvc_zero)
9623 frac.sign = value->sign;
9624 break;
9627 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9628 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9629 build_real (rettype, trunc));
9630 TREE_SIDE_EFFECTS (arg1) = 1;
9631 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9632 build_real (rettype, frac));
9635 return NULL_TREE;
9638 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9639 ARG is the argument for the call. */
9641 static tree
9642 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9644 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9645 REAL_VALUE_TYPE r;
9647 if (!validate_arg (arg, REAL_TYPE))
9649 error ("non-floating-point argument to function %qs",
9650 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9651 return error_mark_node;
9654 switch (builtin_index)
9656 case BUILT_IN_ISINF:
9657 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9658 return omit_one_operand (type, integer_zero_node, arg);
9660 if (TREE_CODE (arg) == REAL_CST)
9662 r = TREE_REAL_CST (arg);
9663 if (real_isinf (&r))
9664 return real_compare (GT_EXPR, &r, &dconst0)
9665 ? integer_one_node : integer_minus_one_node;
9666 else
9667 return integer_zero_node;
9670 return NULL_TREE;
9672 case BUILT_IN_ISFINITE:
9673 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9674 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9675 return omit_one_operand (type, integer_one_node, arg);
9677 if (TREE_CODE (arg) == REAL_CST)
9679 r = TREE_REAL_CST (arg);
9680 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9683 return NULL_TREE;
9685 case BUILT_IN_ISNAN:
9686 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9687 return omit_one_operand (type, integer_zero_node, arg);
9689 if (TREE_CODE (arg) == REAL_CST)
9691 r = TREE_REAL_CST (arg);
9692 return real_isnan (&r) ? integer_one_node : integer_zero_node;
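/* Otherwise fall back to a runtime test: a NaN is the only value that
   compares unordered with itself, so isnan (x) becomes x UNORDERED x.  */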
9695 arg = builtin_save_expr (arg);
9696 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9698 default:
9699 gcc_unreachable ();
9703 /* Fold a call to an unordered comparison function such as
9704 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9705 being called and ARG0 and ARG1 are the arguments for the call.
9706 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9707 the opposite of the desired result. UNORDERED_CODE is used
9708 for modes that can hold NaNs and ORDERED_CODE is used for
9709 the rest. */
9711 static tree
9712 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9713 enum tree_code unordered_code,
9714 enum tree_code ordered_code)
9716 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9717 enum tree_code code;
9718 tree type0, type1;
9719 enum tree_code code0, code1;
9720 tree cmp_type = NULL_TREE;
9722 type0 = TREE_TYPE (arg0);
9723 type1 = TREE_TYPE (arg1);
9725 code0 = TREE_CODE (type0);
9726 code1 = TREE_CODE (type1);
9728 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9729 /* Choose the wider of two real types. */
9730 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9731 ? type0 : type1;
9732 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9733 cmp_type = type0;
9734 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9735 cmp_type = type1;
9736 else
9738 error ("non-floating-point argument to function %qs",
9739 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9740 return error_mark_node;
9743 arg0 = fold_convert (cmp_type, arg0);
9744 arg1 = fold_convert (cmp_type, arg1);
9746 if (unordered_code == UNORDERED_EXPR)
9748 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9749 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9750 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9753 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9754 : ordered_code;
9755 return fold_build1 (TRUTH_NOT_EXPR, type,
9756 fold_build2 (code, type, arg0, arg1));
9759 /* Fold a call to built-in function FNDECL with 0 arguments.
9760 IGNORE is true if the result of the function call is ignored. This
9761 function returns NULL_TREE if no simplification was possible. */
9763 static tree
9764 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9766 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9767 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9768 switch (fcode)
9770 CASE_FLT_FN (BUILT_IN_INF):
9771 case BUILT_IN_INFD32:
9772 case BUILT_IN_INFD64:
9773 case BUILT_IN_INFD128:
9774 return fold_builtin_inf (type, true);
9776 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9777 return fold_builtin_inf (type, false);
9779 case BUILT_IN_CLASSIFY_TYPE:
9780 return fold_builtin_classify_type (NULL_TREE);
9782 default:
9783 break;
9785 return NULL_TREE;
9788 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9789 IGNORE is true if the result of the function call is ignored. This
9790 function returns NULL_TREE if no simplification was possible. */
9792 static tree
9793 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9795 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9796 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9797 switch (fcode)
9800 case BUILT_IN_CONSTANT_P:
9802 tree val = fold_builtin_constant_p (arg0);
9804 /* Gimplification will pull the CALL_EXPR for the builtin out of
9805 an if condition. When not optimizing, we'll not CSE it back.
9806 To avoid link error types of regressions, return false now. */
9807 if (!val && !optimize)
9808 val = integer_zero_node;
9810 return val;
9813 case BUILT_IN_CLASSIFY_TYPE:
9814 return fold_builtin_classify_type (arg0);
9816 case BUILT_IN_STRLEN:
9817 return fold_builtin_strlen (arg0);
9819 CASE_FLT_FN (BUILT_IN_FABS):
9820 return fold_builtin_fabs (arg0, type);
9822 case BUILT_IN_ABS:
9823 case BUILT_IN_LABS:
9824 case BUILT_IN_LLABS:
9825 case BUILT_IN_IMAXABS:
9826 return fold_builtin_abs (arg0, type);
9828 CASE_FLT_FN (BUILT_IN_CONJ):
9829 if (validate_arg (arg0, COMPLEX_TYPE))
9830 return fold_build1 (CONJ_EXPR, type, arg0);
9831 break;
9833 CASE_FLT_FN (BUILT_IN_CREAL):
9834 if (validate_arg (arg0, COMPLEX_TYPE))
9835 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
9836 break;
9838 CASE_FLT_FN (BUILT_IN_CIMAG):
9839 if (validate_arg (arg0, COMPLEX_TYPE))
9840 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9841 break;
9843 CASE_FLT_FN (BUILT_IN_CCOS):
9844 CASE_FLT_FN (BUILT_IN_CCOSH):
9845 /* These functions are "even", i.e. f(x) == f(-x). */
9846 if (validate_arg (arg0, COMPLEX_TYPE))
9848 tree narg = fold_strip_sign_ops (arg0);
9849 if (narg)
9850 return build_call_expr (fndecl, 1, narg);
9852 break;
9854 CASE_FLT_FN (BUILT_IN_CABS):
9855 return fold_builtin_cabs (arg0, type, fndecl);
9857 CASE_FLT_FN (BUILT_IN_CARG):
9858 return fold_builtin_carg (arg0, type);
9860 CASE_FLT_FN (BUILT_IN_SQRT):
9861 return fold_builtin_sqrt (arg0, type);
9863 CASE_FLT_FN (BUILT_IN_CBRT):
9864 return fold_builtin_cbrt (arg0, type);
9866 CASE_FLT_FN (BUILT_IN_ASIN):
9867 if (validate_arg (arg0, REAL_TYPE))
9868 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9869 &dconstm1, &dconst1, true);
9870 break;
9872 CASE_FLT_FN (BUILT_IN_ACOS):
9873 if (validate_arg (arg0, REAL_TYPE))
9874 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9875 &dconstm1, &dconst1, true);
9876 break;
9878 CASE_FLT_FN (BUILT_IN_ATAN):
9879 if (validate_arg (arg0, REAL_TYPE))
9880 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9881 break;
9883 CASE_FLT_FN (BUILT_IN_ASINH):
9884 if (validate_arg (arg0, REAL_TYPE))
9885 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9886 break;
9888 CASE_FLT_FN (BUILT_IN_ACOSH):
9889 if (validate_arg (arg0, REAL_TYPE))
9890 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9891 &dconst1, NULL, true);
9892 break;
9894 CASE_FLT_FN (BUILT_IN_ATANH):
9895 if (validate_arg (arg0, REAL_TYPE))
9896 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9897 &dconstm1, &dconst1, false);
9898 break;
9900 CASE_FLT_FN (BUILT_IN_SIN):
9901 if (validate_arg (arg0, REAL_TYPE))
9902 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9903 break;
9905 CASE_FLT_FN (BUILT_IN_COS):
9906 return fold_builtin_cos (arg0, type, fndecl);
9907 break;
9909 CASE_FLT_FN (BUILT_IN_TAN):
9910 return fold_builtin_tan (arg0, type);
9912 CASE_FLT_FN (BUILT_IN_CEXP):
9913 return fold_builtin_cexp (arg0, type);
9915 CASE_FLT_FN (BUILT_IN_CEXPI):
9916 if (validate_arg (arg0, REAL_TYPE))
9917 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9918 break;
9920 CASE_FLT_FN (BUILT_IN_SINH):
9921 if (validate_arg (arg0, REAL_TYPE))
9922 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9923 break;
9925 CASE_FLT_FN (BUILT_IN_COSH):
9926 return fold_builtin_cosh (arg0, type, fndecl);
9928 CASE_FLT_FN (BUILT_IN_TANH):
9929 if (validate_arg (arg0, REAL_TYPE))
9930 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9931 break;
9933 CASE_FLT_FN (BUILT_IN_ERF):
9934 if (validate_arg (arg0, REAL_TYPE))
9935 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9936 break;
9938 CASE_FLT_FN (BUILT_IN_ERFC):
9939 if (validate_arg (arg0, REAL_TYPE))
9940 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9941 break;
9943 CASE_FLT_FN (BUILT_IN_TGAMMA):
9944 if (validate_arg (arg0, REAL_TYPE))
9945 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9946 break;
9948 CASE_FLT_FN (BUILT_IN_EXP):
9949 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9951 CASE_FLT_FN (BUILT_IN_EXP2):
9952 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9954 CASE_FLT_FN (BUILT_IN_EXP10):
9955 CASE_FLT_FN (BUILT_IN_POW10):
9956 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9958 CASE_FLT_FN (BUILT_IN_EXPM1):
9959 if (validate_arg (arg0, REAL_TYPE))
9960 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9961 break;
9963 CASE_FLT_FN (BUILT_IN_LOG):
9964 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9966 CASE_FLT_FN (BUILT_IN_LOG2):
9967 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9969 CASE_FLT_FN (BUILT_IN_LOG10):
9970 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9972 CASE_FLT_FN (BUILT_IN_LOG1P):
9973 if (validate_arg (arg0, REAL_TYPE))
9974 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9975 &dconstm1, NULL, false);
9976 break;
9978 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9979 CASE_FLT_FN (BUILT_IN_J0):
9980 if (validate_arg (arg0, REAL_TYPE))
9981 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9982 NULL, NULL, 0);
9983 break;
9985 CASE_FLT_FN (BUILT_IN_J1):
9986 if (validate_arg (arg0, REAL_TYPE))
9987 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9988 NULL, NULL, 0);
9989 break;
9991 CASE_FLT_FN (BUILT_IN_Y0):
9992 if (validate_arg (arg0, REAL_TYPE))
9993 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9994 &dconst0, NULL, false);
9995 break;
9997 CASE_FLT_FN (BUILT_IN_Y1):
9998 if (validate_arg (arg0, REAL_TYPE))
9999 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10000 &dconst0, NULL, false);
10001 break;
10002 #endif
10004 CASE_FLT_FN (BUILT_IN_NAN):
10005 case BUILT_IN_NAND32:
10006 case BUILT_IN_NAND64:
10007 case BUILT_IN_NAND128:
10008 return fold_builtin_nan (arg0, type, true);
10010 CASE_FLT_FN (BUILT_IN_NANS):
10011 return fold_builtin_nan (arg0, type, false);
10013 CASE_FLT_FN (BUILT_IN_FLOOR):
10014 return fold_builtin_floor (fndecl, arg0);
10016 CASE_FLT_FN (BUILT_IN_CEIL):
10017 return fold_builtin_ceil (fndecl, arg0);
10019 CASE_FLT_FN (BUILT_IN_TRUNC):
10020 return fold_builtin_trunc (fndecl, arg0);
10022 CASE_FLT_FN (BUILT_IN_ROUND):
10023 return fold_builtin_round (fndecl, arg0);
10025 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10026 CASE_FLT_FN (BUILT_IN_RINT):
10027 return fold_trunc_transparent_mathfn (fndecl, arg0);
10029 CASE_FLT_FN (BUILT_IN_LCEIL):
10030 CASE_FLT_FN (BUILT_IN_LLCEIL):
10031 CASE_FLT_FN (BUILT_IN_LFLOOR):
10032 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10033 CASE_FLT_FN (BUILT_IN_LROUND):
10034 CASE_FLT_FN (BUILT_IN_LLROUND):
10035 return fold_builtin_int_roundingfn (fndecl, arg0);
10037 CASE_FLT_FN (BUILT_IN_LRINT):
10038 CASE_FLT_FN (BUILT_IN_LLRINT):
10039 return fold_fixed_mathfn (fndecl, arg0);
10041 case BUILT_IN_BSWAP32:
10042 case BUILT_IN_BSWAP64:
10043 return fold_builtin_bswap (fndecl, arg0);
10045 CASE_INT_FN (BUILT_IN_FFS):
10046 CASE_INT_FN (BUILT_IN_CLZ):
10047 CASE_INT_FN (BUILT_IN_CTZ):
10048 CASE_INT_FN (BUILT_IN_POPCOUNT):
10049 CASE_INT_FN (BUILT_IN_PARITY):
10050 return fold_builtin_bitop (fndecl, arg0);
10052 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10053 return fold_builtin_signbit (arg0, type);
10055 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10056 return fold_builtin_significand (arg0, type);
10058 CASE_FLT_FN (BUILT_IN_ILOGB):
10059 CASE_FLT_FN (BUILT_IN_LOGB):
10060 return fold_builtin_logb (arg0, type);
10062 case BUILT_IN_ISASCII:
10063 return fold_builtin_isascii (arg0);
10065 case BUILT_IN_TOASCII:
10066 return fold_builtin_toascii (arg0);
10068 case BUILT_IN_ISDIGIT:
10069 return fold_builtin_isdigit (arg0);
10071 CASE_FLT_FN (BUILT_IN_FINITE):
10072 case BUILT_IN_FINITED32:
10073 case BUILT_IN_FINITED64:
10074 case BUILT_IN_FINITED128:
10075 case BUILT_IN_ISFINITE:
10076 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10078 CASE_FLT_FN (BUILT_IN_ISINF):
10079 case BUILT_IN_ISINFD32:
10080 case BUILT_IN_ISINFD64:
10081 case BUILT_IN_ISINFD128:
10082 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10084 CASE_FLT_FN (BUILT_IN_ISNAN):
10085 case BUILT_IN_ISNAND32:
10086 case BUILT_IN_ISNAND64:
10087 case BUILT_IN_ISNAND128:
10088 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10090 case BUILT_IN_ISNORMAL:
10091 if (!validate_arg (arg0, REAL_TYPE))
10093 error ("non-floating-point argument to function %qs",
10094 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
10095 return error_mark_node;
10097 break;
10099 case BUILT_IN_PRINTF:
10100 case BUILT_IN_PRINTF_UNLOCKED:
10101 case BUILT_IN_VPRINTF:
10102 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10104 default:
10105 break;
10108 return NULL_TREE;
10112 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10113 IGNORE is true if the result of the function call is ignored. This
10114 function returns NULL_TREE if no simplification was possible. */
10116 static tree
10117 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10119 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10120 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10122 switch (fcode)
10124 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10125 CASE_FLT_FN (BUILT_IN_JN):
10126 if (validate_arg (arg0, INTEGER_TYPE)
10127 && validate_arg (arg1, REAL_TYPE))
10128 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10129 break;
10131 CASE_FLT_FN (BUILT_IN_YN):
10132 if (validate_arg (arg0, INTEGER_TYPE)
10133 && validate_arg (arg1, REAL_TYPE))
10134 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10135 &dconst0, false);
10136 break;
10138 CASE_FLT_FN (BUILT_IN_DREM):
10139 CASE_FLT_FN (BUILT_IN_REMAINDER):
10140 if (validate_arg (arg0, REAL_TYPE)
10141 && validate_arg (arg1, REAL_TYPE))
10142 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10143 break;
10145 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10146 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10147 if (validate_arg (arg0, REAL_TYPE)
10148 && validate_arg (arg1, POINTER_TYPE))
10149 return do_mpfr_lgamma_r (arg0, arg1, type);
10150 break;
10151 #endif
10153 CASE_FLT_FN (BUILT_IN_ATAN2):
10154 if (validate_arg (arg0, REAL_TYPE)
10155 && validate_arg (arg1, REAL_TYPE))
10156 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10157 break;
10159 CASE_FLT_FN (BUILT_IN_FDIM):
10160 if (validate_arg (arg0, REAL_TYPE)
10161 && validate_arg (arg1, REAL_TYPE))
10162 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10163 break;
10165 CASE_FLT_FN (BUILT_IN_HYPOT):
10166 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10168 CASE_FLT_FN (BUILT_IN_LDEXP):
10169 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10170 CASE_FLT_FN (BUILT_IN_SCALBN):
10171 CASE_FLT_FN (BUILT_IN_SCALBLN):
10172 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10174 CASE_FLT_FN (BUILT_IN_FREXP):
10175 return fold_builtin_frexp (arg0, arg1, type);
10177 CASE_FLT_FN (BUILT_IN_MODF):
10178 return fold_builtin_modf (arg0, arg1, type);
10180 case BUILT_IN_BZERO:
10181 return fold_builtin_bzero (arg0, arg1, ignore);
10183 case BUILT_IN_FPUTS:
10184 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10186 case BUILT_IN_FPUTS_UNLOCKED:
10187 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10189 case BUILT_IN_STRSTR:
10190 return fold_builtin_strstr (arg0, arg1, type);
10192 case BUILT_IN_STRCAT:
10193 return fold_builtin_strcat (arg0, arg1);
10195 case BUILT_IN_STRSPN:
10196 return fold_builtin_strspn (arg0, arg1);
10198 case BUILT_IN_STRCSPN:
10199 return fold_builtin_strcspn (arg0, arg1);
10201 case BUILT_IN_STRCHR:
10202 case BUILT_IN_INDEX:
10203 return fold_builtin_strchr (arg0, arg1, type);
10205 case BUILT_IN_STRRCHR:
10206 case BUILT_IN_RINDEX:
10207 return fold_builtin_strrchr (arg0, arg1, type);
10209 case BUILT_IN_STRCPY:
10210 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10212 case BUILT_IN_STRCMP:
10213 return fold_builtin_strcmp (arg0, arg1);
10215 case BUILT_IN_STRPBRK:
10216 return fold_builtin_strpbrk (arg0, arg1, type);
10218 case BUILT_IN_EXPECT:
10219 return fold_builtin_expect (arg0, arg1);
10221 CASE_FLT_FN (BUILT_IN_POW):
10222 return fold_builtin_pow (fndecl, arg0, arg1, type);
10224 CASE_FLT_FN (BUILT_IN_POWI):
10225 return fold_builtin_powi (fndecl, arg0, arg1, type);
10227 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10228 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10230 CASE_FLT_FN (BUILT_IN_FMIN):
10231 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10233 CASE_FLT_FN (BUILT_IN_FMAX):
10234 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
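/* The type-generic comparison builtins below all go through
   fold_builtin_unordered_cmp, which receives the tree code for the
   opposite of the desired result; e.g. isgreater (x, y) is folded to
   !(x UNLE y) when NaNs are honored and !(x LE y) otherwise.  */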
10236 case BUILT_IN_ISGREATER:
10237 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10238 case BUILT_IN_ISGREATEREQUAL:
10239 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10240 case BUILT_IN_ISLESS:
10241 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10242 case BUILT_IN_ISLESSEQUAL:
10243 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10244 case BUILT_IN_ISLESSGREATER:
10245 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10246 case BUILT_IN_ISUNORDERED:
10247 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10248 NOP_EXPR);
10250 /* We do the folding for va_start in the expander. */
10251 case BUILT_IN_VA_START:
10252 break;
10254 case BUILT_IN_SPRINTF:
10255 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10257 case BUILT_IN_OBJECT_SIZE:
10258 return fold_builtin_object_size (arg0, arg1);
10260 case BUILT_IN_PRINTF:
10261 case BUILT_IN_PRINTF_UNLOCKED:
10262 case BUILT_IN_VPRINTF:
10263 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10265 case BUILT_IN_PRINTF_CHK:
10266 case BUILT_IN_VPRINTF_CHK:
10267 if (!validate_arg (arg0, INTEGER_TYPE)
10268 || TREE_SIDE_EFFECTS (arg0))
10269 return NULL_TREE;
10270 else
10271 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10272 break;
10274 case BUILT_IN_FPRINTF:
10275 case BUILT_IN_FPRINTF_UNLOCKED:
10276 case BUILT_IN_VFPRINTF:
10277 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10278 ignore, fcode);
10280 default:
10281 break;
10283 return NULL_TREE;
10286 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10287 and ARG2. IGNORE is true if the result of the function call is ignored.
10288 This function returns NULL_TREE if no simplification was possible. */
10290 static tree
10291 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10293 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10294 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10295 switch (fcode)
10298 CASE_FLT_FN (BUILT_IN_SINCOS):
10299 return fold_builtin_sincos (arg0, arg1, arg2);
10301 CASE_FLT_FN (BUILT_IN_FMA):
10302 if (validate_arg (arg0, REAL_TYPE)
10303 && validate_arg (arg1, REAL_TYPE)
10304 && validate_arg (arg2, REAL_TYPE))
10305 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10306 break;
10308 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10309 CASE_FLT_FN (BUILT_IN_REMQUO):
10310 if (validate_arg (arg0, REAL_TYPE)
10311 && validate_arg (arg1, REAL_TYPE)
10312 && validate_arg (arg2, POINTER_TYPE))
10313 return do_mpfr_remquo (arg0, arg1, arg2);
10314 break;
10315 #endif
10317 case BUILT_IN_MEMSET:
10318 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10320 case BUILT_IN_BCOPY:
10321 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10323 case BUILT_IN_MEMCPY:
10324 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10326 case BUILT_IN_MEMPCPY:
10327 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10329 case BUILT_IN_MEMMOVE:
10330 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10332 case BUILT_IN_STRNCAT:
10333 return fold_builtin_strncat (arg0, arg1, arg2);
10335 case BUILT_IN_STRNCPY:
10336 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10338 case BUILT_IN_STRNCMP:
10339 return fold_builtin_strncmp (arg0, arg1, arg2);
10341 case BUILT_IN_MEMCHR:
10342 return fold_builtin_memchr (arg0, arg1, arg2, type);
10344 case BUILT_IN_BCMP:
10345 case BUILT_IN_MEMCMP:
10346 return fold_builtin_memcmp (arg0, arg1, arg2);
10348 case BUILT_IN_SPRINTF:
10349 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10351 case BUILT_IN_STRCPY_CHK:
10352 case BUILT_IN_STPCPY_CHK:
10353 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10354 ignore, fcode);
10356 case BUILT_IN_STRCAT_CHK:
10357 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10359 case BUILT_IN_PRINTF_CHK:
10360 case BUILT_IN_VPRINTF_CHK:
10361 if (!validate_arg (arg0, INTEGER_TYPE)
10362 || TREE_SIDE_EFFECTS (arg0))
10363 return NULL_TREE;
10364 else
10365 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10366 break;
10368 case BUILT_IN_FPRINTF:
10369 case BUILT_IN_FPRINTF_UNLOCKED:
10370 case BUILT_IN_VFPRINTF:
10371 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10373 case BUILT_IN_FPRINTF_CHK:
10374 case BUILT_IN_VFPRINTF_CHK:
10375 if (!validate_arg (arg1, INTEGER_TYPE)
10376 || TREE_SIDE_EFFECTS (arg1))
10377 return NULL_TREE;
10378 else
10379 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10380 ignore, fcode);
10382 default:
10383 break;
10385 return NULL_TREE;
10388 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10389 ARG2, and ARG3. IGNORE is true if the result of the function call is
10390 ignored. This function returns NULL_TREE if no simplification was
10391 possible. */
10393 static tree
10394 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10395 bool ignore)
10397 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10399 switch (fcode)
10401 case BUILT_IN_MEMCPY_CHK:
10402 case BUILT_IN_MEMPCPY_CHK:
10403 case BUILT_IN_MEMMOVE_CHK:
10404 case BUILT_IN_MEMSET_CHK:
10405 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10406 NULL_TREE, ignore,
10407 DECL_FUNCTION_CODE (fndecl));
10409 case BUILT_IN_STRNCPY_CHK:
10410 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10412 case BUILT_IN_STRNCAT_CHK:
10413 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10415 case BUILT_IN_FPRINTF_CHK:
10416 case BUILT_IN_VFPRINTF_CHK:
10417 if (!validate_arg (arg1, INTEGER_TYPE)
10418 || TREE_SIDE_EFFECTS (arg1))
10419 return NULL_TREE;
10420 else
10421 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10422 ignore, fcode);
10423 break;
10425 default:
10426 break;
10428 return NULL_TREE;
10431 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10432 arguments, where NARGS <= 4. IGNORE is true if the result of the
10433 function call is ignored. This function returns NULL_TREE if no
10434 simplification was possible. Note that this only folds builtins with
10435 fixed argument patterns. Foldings that do varargs-to-varargs
10436 transformations, or that match calls with more than 4 arguments,
10437 need to be handled with fold_builtin_varargs instead. */
10439 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10441 static tree
10442 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10444 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10445 tree ret = NULL_TREE;
10447 /* Verify the number of arguments for type-generic and thus variadic
10448 builtins. */
10449 switch (fcode)
10451 case BUILT_IN_ISFINITE:
10452 case BUILT_IN_ISINF:
10453 case BUILT_IN_ISNAN:
10454 case BUILT_IN_ISNORMAL:
10455 if (nargs < 1)
10457 error ("too few arguments to function %qs",
10458 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
10459 return error_mark_node;
10461 else if (nargs > 1)
10463 error ("too many arguments to function %qs",
10464 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
10465 return error_mark_node;
10467 break;
10469 case BUILT_IN_ISGREATER:
10470 case BUILT_IN_ISGREATEREQUAL:
10471 case BUILT_IN_ISLESS:
10472 case BUILT_IN_ISLESSEQUAL:
10473 case BUILT_IN_ISLESSGREATER:
10474 case BUILT_IN_ISUNORDERED:
10475 if (nargs < 2)
10477 error ("too few arguments to function %qs",
10478 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
10479 return error_mark_node;
10481 else if (nargs > 2)
10483 error ("too many arguments to function %qs",
10484 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
10485 return error_mark_node;
10487 break;
10489 default:
10490 break;
10493 switch (nargs)
10495 case 0:
10496 ret = fold_builtin_0 (fndecl, ignore);
10497 break;
10498 case 1:
10499 ret = fold_builtin_1 (fndecl, args[0], ignore);
10500 break;
10501 case 2:
10502 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10503 break;
10504 case 3:
10505 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10506 break;
10507 case 4:
10508 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10509 ignore);
10510 break;
10511 default:
10512 break;
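/* Wrap any successful folding in a NOP_EXPR with TREE_NO_WARNING set,
   so that replacing the original call does not trigger "statement
   without effect" style warnings (see the comment before
   fold_call_expr, below).  */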
10514 if (ret)
10516 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10517 TREE_NO_WARNING (ret) = 1;
10518 return ret;
10520 return NULL_TREE;
10523 /* Builtins with folding operations that operate on "..." arguments
10524 need special handling; we need to store the arguments in a convenient
10525 data structure before attempting any folding. Fortunately there are
10526 only a few builtins that fall into this category. FNDECL is the
10527 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10528 result of the function call is ignored. */
10530 static tree
10531 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10533 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10534 tree ret = NULL_TREE;
10536 switch (fcode)
10538 case BUILT_IN_SPRINTF_CHK:
10539 case BUILT_IN_VSPRINTF_CHK:
10540 ret = fold_builtin_sprintf_chk (exp, fcode);
10541 break;
10543 case BUILT_IN_SNPRINTF_CHK:
10544 case BUILT_IN_VSNPRINTF_CHK:
10545 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10547 default:
10548 break;
10550 if (ret)
10552 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10553 TREE_NO_WARNING (ret) = 1;
10554 return ret;
10556 return NULL_TREE;
10559 /* A wrapper function for builtin folding that prevents warnings for
10560 "statement without effect" and the like, caused by removing the
10561 call node earlier than the warning is generated. */
10563 tree
10564 fold_call_expr (tree exp, bool ignore)
10566 tree ret = NULL_TREE;
10567 tree fndecl = get_callee_fndecl (exp);
10568 if (fndecl
10569 && TREE_CODE (fndecl) == FUNCTION_DECL
10570 && DECL_BUILT_IN (fndecl)
10571 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10572 yet. Defer folding until we see all the arguments
10573 (after inlining). */
10574 && !CALL_EXPR_VA_ARG_PACK (exp))
10576 int nargs = call_expr_nargs (exp);
10578 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10579 instead last argument is __builtin_va_arg_pack (). Defer folding
10580 even in that case, until arguments are finalized. */
10581 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10583 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10584 if (fndecl2
10585 && TREE_CODE (fndecl2) == FUNCTION_DECL
10586 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10587 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10588 return NULL_TREE;
10591 /* FIXME: Don't use a list in this interface. */
10592 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10593 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10594 else
10596 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10598 tree *args = CALL_EXPR_ARGP (exp);
10599 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10601 if (!ret)
10602 ret = fold_builtin_varargs (fndecl, exp, ignore);
10603 if (ret)
10605 /* Propagate location information from original call to
10606 expansion of builtin. Otherwise things like
10607 maybe_emit_chk_warning, that operate on the expansion
10608 of a builtin, will use the wrong location information. */
10609 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10611 tree realret = ret;
10612 if (TREE_CODE (ret) == NOP_EXPR)
10613 realret = TREE_OPERAND (ret, 0);
10614 if (CAN_HAVE_LOCATION_P (realret)
10615 && !EXPR_HAS_LOCATION (realret))
10616 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10618 return ret;
10622 return NULL_TREE;
10625 /* Conveniently construct a function call expression. FNDECL names the
10626 function to be called and ARGLIST is a TREE_LIST of arguments. */
10628 tree
10629 build_function_call_expr (tree fndecl, tree arglist)
10631 tree fntype = TREE_TYPE (fndecl);
10632 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10633 int n = list_length (arglist);
10634 tree *argarray = (tree *) alloca (n * sizeof (tree));
10635 int i;
10637 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10638 argarray[i] = TREE_VALUE (arglist);
10639 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10642 /* Conveniently construct a function call expression. FNDECL names the
10643 function to be called, N is the number of arguments, and the "..."
10644 parameters are the argument expressions. */
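/* For example, fold_builtin_carg above uses
   build_call_expr (atan2_fn, 2, i_arg, r_arg) to build the call
   atan2 (b, a) for carg (a + b*i).  */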
10646 tree
10647 build_call_expr (tree fndecl, int n, ...)
10649 va_list ap;
10650 tree fntype = TREE_TYPE (fndecl);
10651 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10652 tree *argarray = (tree *) alloca (n * sizeof (tree));
10653 int i;
10655 va_start (ap, n);
10656 for (i = 0; i < n; i++)
10657 argarray[i] = va_arg (ap, tree);
10658 va_end (ap);
10659 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10662 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10663 N arguments are passed in the array ARGARRAY. */
10665 tree
10666 fold_builtin_call_array (tree type,
10667 tree fn,
10668 int n,
10669 tree *argarray)
10671 tree ret = NULL_TREE;
10672 int i;
10673 tree exp;
10675 if (TREE_CODE (fn) == ADDR_EXPR)
10677 tree fndecl = TREE_OPERAND (fn, 0);
10678 if (TREE_CODE (fndecl) == FUNCTION_DECL
10679 && DECL_BUILT_IN (fndecl))
10681 /* If last argument is __builtin_va_arg_pack (), arguments to this
10682 function are not finalized yet. Defer folding until they are. */
10683 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10685 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10686 if (fndecl2
10687 && TREE_CODE (fndecl2) == FUNCTION_DECL
10688 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10689 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10690 return build_call_array (type, fn, n, argarray);
10692 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10694 tree arglist = NULL_TREE;
10695 for (i = n - 1; i >= 0; i--)
10696 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10697 ret = targetm.fold_builtin (fndecl, arglist, false);
10698 if (ret)
10699 return ret;
10701 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10703 /* First try the transformations that don't require consing up
10704 an exp. */
10705 ret = fold_builtin_n (fndecl, argarray, n, false);
10706 if (ret)
10707 return ret;
10710 /* If we got this far, we need to build an exp. */
10711 exp = build_call_array (type, fn, n, argarray);
10712 ret = fold_builtin_varargs (fndecl, exp, false);
10713 return ret ? ret : exp;
10717 return build_call_array (type, fn, n, argarray);
10720 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10721 along with N new arguments specified as the "..." parameters. SKIP
10722 is the number of arguments in EXP to be omitted. This function is used
10723 to do varargs-to-varargs transformations. */
10725 static tree
10726 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10728 int oldnargs = call_expr_nargs (exp);
10729 int nargs = oldnargs - skip + n;
10730 tree fntype = TREE_TYPE (fndecl);
10731 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10732 tree *buffer;
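/* BUFFER either points at a freshly allocated vector holding the N new
   arguments followed by the kept tail of EXP's arguments, or, when no
   new arguments are supplied, directly into EXP's own argument
   vector.  */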
10734 if (n > 0)
10736 int i, j;
10737 va_list ap;
10739 buffer = (tree *) alloca (nargs * sizeof (tree));
10740 va_start (ap, n);
10741 for (i = 0; i < n; i++)
10742 buffer[i] = va_arg (ap, tree);
10743 va_end (ap);
10744 for (j = skip; j < oldnargs; j++, i++)
10745 buffer[i] = CALL_EXPR_ARG (exp, j);
10747 else
10748 buffer = CALL_EXPR_ARGP (exp) + skip;
10750 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10753 /* Validate a single argument ARG against a tree code CODE representing
10754 a type. */
10756 static bool
10757 validate_arg (const_tree arg, enum tree_code code)
10759 if (!arg)
10760 return false;
10761 else if (code == POINTER_TYPE)
10762 return POINTER_TYPE_P (TREE_TYPE (arg));
10763 else if (code == INTEGER_TYPE)
10764 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10765 return code == TREE_CODE (TREE_TYPE (arg));
10768 /* This function validates the types of a function call argument list
10769 against a specified list of tree_codes. If the last specifier is a 0,
10770 that represents an ellipsis; otherwise the last specifier must be a
10771 VOID_TYPE. */
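/* For example, a call such as
   validate_arglist (callexpr, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
   accepts exactly one pointer argument followed by one integral
   argument, whereas a trailing 0 in place of VOID_TYPE would allow any
   number of further arguments.  */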
10773 bool
10774 validate_arglist (const_tree callexpr, ...)
10776 enum tree_code code;
10777 bool res = false;
10778 va_list ap;
10779 const_call_expr_arg_iterator iter;
10780 const_tree arg;
10782 va_start (ap, callexpr);
10783 init_const_call_expr_arg_iterator (callexpr, &iter);
10785 do
10787 code = va_arg (ap, enum tree_code);
10788 switch (code)
10790 case 0:
10791 /* This signifies an ellipsis; any further arguments are all ok. */
10792 res = true;
10793 goto end;
10794 case VOID_TYPE:
10795 /* This signifies an endlink: if no arguments remain, return
10796 true; otherwise return false. */
10797 res = !more_const_call_expr_args_p (&iter);
10798 goto end;
10799 default:
10800 /* If no parameters remain or the parameter's code does not
10801 match the specified code, return false. Otherwise continue
10802 checking any remaining arguments. */
10803 arg = next_const_call_expr_arg (&iter);
10804 if (!validate_arg (arg, code))
10805 goto end;
10806 break;
10809 while (1);
10811 /* We need gotos here since we can only have one VA_CLOSE in a
10812 function. */
10813 end: ;
10814 va_end (ap);
10816 return res;
10819 /* Default target-specific builtin expander that does nothing. */
10821 rtx
10822 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10823 rtx target ATTRIBUTE_UNUSED,
10824 rtx subtarget ATTRIBUTE_UNUSED,
10825 enum machine_mode mode ATTRIBUTE_UNUSED,
10826 int ignore ATTRIBUTE_UNUSED)
10828 return NULL_RTX;
10831 /* Returns true if EXP represents data that would potentially reside
10832 in a readonly section. */
10834 static bool
10835 readonly_data_expr (tree exp)
10837 STRIP_NOPS (exp);
10839 if (TREE_CODE (exp) != ADDR_EXPR)
10840 return false;
10842 exp = get_base_address (TREE_OPERAND (exp, 0));
10843 if (!exp)
10844 return false;
10846 /* Make sure we call decl_readonly_section only for trees it
10847 can handle (since it returns true for everything it doesn't
10848 understand). */
10849 if (TREE_CODE (exp) == STRING_CST
10850 || TREE_CODE (exp) == CONSTRUCTOR
10851 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10852 return decl_readonly_section (exp, 0);
10853 else
10854 return false;
10857 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10858 to the call, and TYPE is its return type.
10860 Return NULL_TREE if no simplification was possible, otherwise return the
10861 simplified form of the call as a tree.
10863 The simplified form may be a constant or other expression which
10864 computes the same value, but in a more efficient manner (including
10865 calls to other builtin functions).
10867 The call may contain arguments which need to be evaluated, but
10868 which are not useful to determine the result of the call. In
10869 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10870 COMPOUND_EXPR will be an argument which must be evaluated.
10871 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10872 COMPOUND_EXPR in the chain will contain the tree for the simplified
10873 form of the builtin function call. */
10875 static tree
10876 fold_builtin_strstr (tree s1, tree s2, tree type)
10878 if (!validate_arg (s1, POINTER_TYPE)
10879 || !validate_arg (s2, POINTER_TYPE))
10880 return NULL_TREE;
10881 else
10883 tree fn;
10884 const char *p1, *p2;
10886 p2 = c_getstr (s2);
10887 if (p2 == NULL)
10888 return NULL_TREE;
10890 p1 = c_getstr (s1);
10891 if (p1 != NULL)
10893 const char *r = strstr (p1, p2);
10894 tree tem;
10896 if (r == NULL)
10897 return build_int_cst (TREE_TYPE (s1), 0);
10899 /* Return an offset into the constant string argument. */
10900 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10901 s1, size_int (r - p1));
10902 return fold_convert (type, tem);
10905 /* The argument is const char *, and the result is char *, so we need
10906 a type conversion here to avoid a warning. */
10907 if (p2[0] == '\0')
10908 return fold_convert (type, s1);
10910 if (p2[1] != '\0')
10911 return NULL_TREE;
10913 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10914 if (!fn)
10915 return NULL_TREE;
10917 /* New argument list transforming strstr(s1, s2) to
10918 strchr(s1, s2[0]). */
10919 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10923 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10924 the call, and TYPE is its return type.
10926 Return NULL_TREE if no simplification was possible, otherwise return the
10927 simplified form of the call as a tree.
10929 The simplified form may be a constant or other expression which
10930 computes the same value, but in a more efficient manner (including
10931 calls to other builtin functions).
10933 The call may contain arguments which need to be evaluated, but
10934 which are not useful to determine the result of the call. In
10935 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10936 COMPOUND_EXPR will be an argument which must be evaluated.
10937 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10938 COMPOUND_EXPR in the chain will contain the tree for the simplified
10939 form of the builtin function call. */
10941 static tree
10942 fold_builtin_strchr (tree s1, tree s2, tree type)
10944 if (!validate_arg (s1, POINTER_TYPE)
10945 || !validate_arg (s2, INTEGER_TYPE))
10946 return NULL_TREE;
10947 else
10949 const char *p1;
10951 if (TREE_CODE (s2) != INTEGER_CST)
10952 return NULL_TREE;
10954 p1 = c_getstr (s1);
10955 if (p1 != NULL)
10957 char c;
10958 const char *r;
10959 tree tem;
10961 if (target_char_cast (s2, &c))
10962 return NULL_TREE;
10964 r = strchr (p1, c);
10966 if (r == NULL)
10967 return build_int_cst (TREE_TYPE (s1), 0);
10969 /* Return an offset into the constant string argument. */
10970 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10971 s1, size_int (r - p1));
10972 return fold_convert (type, tem);
10974 return NULL_TREE;
10978 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10979 the call, and TYPE is its return type.
10981 Return NULL_TREE if no simplification was possible, otherwise return the
10982 simplified form of the call as a tree.
10984 The simplified form may be a constant or other expression which
10985 computes the same value, but in a more efficient manner (including
10986 calls to other builtin functions).
10988 The call may contain arguments which need to be evaluated, but
10989 which are not useful to determine the result of the call. In
10990 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10991 COMPOUND_EXPR will be an argument which must be evaluated.
10992 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10993 COMPOUND_EXPR in the chain will contain the tree for the simplified
10994 form of the builtin function call. */
10996 static tree
10997 fold_builtin_strrchr (tree s1, tree s2, tree type)
10999 if (!validate_arg (s1, POINTER_TYPE)
11000 || !validate_arg (s2, INTEGER_TYPE))
11001 return NULL_TREE;
11002 else
11004 tree fn;
11005 const char *p1;
11007 if (TREE_CODE (s2) != INTEGER_CST)
11008 return NULL_TREE;
11010 p1 = c_getstr (s1);
11011 if (p1 != NULL)
11013 char c;
11014 const char *r;
11015 tree tem;
11017 if (target_char_cast (s2, &c))
11018 return NULL_TREE;
11020 r = strrchr (p1, c);
11022 if (r == NULL)
11023 return build_int_cst (TREE_TYPE (s1), 0);
11025 /* Return an offset into the constant string argument. */
11026 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11027 s1, size_int (r - p1));
11028 return fold_convert (type, tem);
11031 if (! integer_zerop (s2))
11032 return NULL_TREE;
11034 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11035 if (!fn)
11036 return NULL_TREE;
11038 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11039 return build_call_expr (fn, 2, s1, s2);
11043 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11044 to the call, and TYPE is its return type.
11046 Return NULL_TREE if no simplification was possible, otherwise return the
11047 simplified form of the call as a tree.
11049 The simplified form may be a constant or other expression which
11050 computes the same value, but in a more efficient manner (including
11051 calls to other builtin functions).
11053 The call may contain arguments which need to be evaluated, but
11054 which are not useful to determine the result of the call. In
11055 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11056 COMPOUND_EXPR will be an argument which must be evaluated.
11057 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11058 COMPOUND_EXPR in the chain will contain the tree for the simplified
11059 form of the builtin function call. */
11061 static tree
11062 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11064 if (!validate_arg (s1, POINTER_TYPE)
11065 || !validate_arg (s2, POINTER_TYPE))
11066 return NULL_TREE;
11067 else
11069 tree fn;
11070 const char *p1, *p2;
11072 p2 = c_getstr (s2);
11073 if (p2 == NULL)
11074 return NULL_TREE;
11076 p1 = c_getstr (s1);
11077 if (p1 != NULL)
11079 const char *r = strpbrk (p1, p2);
11080 tree tem;
11082 if (r == NULL)
11083 return build_int_cst (TREE_TYPE (s1), 0);
11085 /* Return an offset into the constant string argument. */
11086 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11087 s1, size_int (r - p1));
11088 return fold_convert (type, tem);
11091 if (p2[0] == '\0')
11092 /* strpbrk(x, "") == NULL.
11093 Evaluate and ignore s1 in case it had side-effects. */
11094 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11096 if (p2[1] != '\0')
11097 return NULL_TREE; /* Really call strpbrk. */
11099 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11100 if (!fn)
11101 return NULL_TREE;
11103 /* New argument list transforming strpbrk(s1, s2) to
11104 strchr(s1, s2[0]). */
11105 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
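/* A hypothetical sketch (not taken from this file) of the two strpbrk
   special cases handled above: an empty accept set never matches, and a
   one-character accept set behaves exactly like strchr.  */
#include <assert.h>
#include <string.h>

static void
strpbrk_fold_example (void)
{
  const char *s = "foo=bar";
  assert (strpbrk (s, "") == 0);                 /* strpbrk (x, "") is NULL.  */
  assert (strpbrk (s, "=") == strchr (s, '='));  /* single-char set: strchr.  */
}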
11109 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11110 to the call.
11112 Return NULL_TREE if no simplification was possible, otherwise return the
11113 simplified form of the call as a tree.
11115 The simplified form may be a constant or other expression which
11116 computes the same value, but in a more efficient manner (including
11117 calls to other builtin functions).
11119 The call may contain arguments which need to be evaluated, but
11120 which are not useful to determine the result of the call. In
11121 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11122 COMPOUND_EXPR will be an argument which must be evaluated.
11123 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11124 COMPOUND_EXPR in the chain will contain the tree for the simplified
11125 form of the builtin function call. */
11127 static tree
11128 fold_builtin_strcat (tree dst, tree src)
11130 if (!validate_arg (dst, POINTER_TYPE)
11131 || !validate_arg (src, POINTER_TYPE))
11132 return NULL_TREE;
11133 else
11135 const char *p = c_getstr (src);
11137 /* If the string length is zero, return the dst parameter. */
11138 if (p && *p == '\0')
11139 return dst;
11141 return NULL_TREE;
11145 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11146 arguments to the call.
11148 Return NULL_TREE if no simplification was possible, otherwise return the
11149 simplified form of the call as a tree.
11151 The simplified form may be a constant or other expression which
11152 computes the same value, but in a more efficient manner (including
11153 calls to other builtin functions).
11155 The call may contain arguments which need to be evaluated, but
11156 which are not useful to determine the result of the call. In
11157 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11158 COMPOUND_EXPR will be an argument which must be evaluated.
11159 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11160 COMPOUND_EXPR in the chain will contain the tree for the simplified
11161 form of the builtin function call. */
11163 static tree
11164 fold_builtin_strncat (tree dst, tree src, tree len)
11166 if (!validate_arg (dst, POINTER_TYPE)
11167 || !validate_arg (src, POINTER_TYPE)
11168 || !validate_arg (len, INTEGER_TYPE))
11169 return NULL_TREE;
11170 else
11172 const char *p = c_getstr (src);
11174 /* If the requested length is zero, or the src parameter string
11175 length is zero, return the dst parameter. */
11176 if (integer_zerop (len) || (p && *p == '\0'))
11177 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11179 /* If the requested len is greater than or equal to the string
11180 length, call strcat. */
11181 if (TREE_CODE (len) == INTEGER_CST && p
11182 && compare_tree_int (len, strlen (p)) >= 0)
11184 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11186 /* If the replacement _DECL isn't initialized, don't do the
11187 transformation. */
11188 if (!fn)
11189 return NULL_TREE;
11191 return build_call_expr (fn, 2, dst, src);
11193 return NULL_TREE;
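/* A hypothetical sketch (not taken from this file) of the strncat cases
   folded above: a zero count or an empty source leaves DST unchanged,
   and a count of at least strlen (SRC) makes strncat equivalent to
   strcat.  */
#include <assert.h>
#include <string.h>

static void
strncat_fold_example (void)
{
  char buf[16] = "ab";
  assert (strncat (buf, "cd", 0) == buf && strcmp (buf, "ab") == 0);
  strncat (buf, "cd", 8);              /* 8 >= strlen ("cd"): acts as strcat.  */
  assert (strcmp (buf, "abcd") == 0);
}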
11197 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11198 to the call.
11200 Return NULL_TREE if no simplification was possible, otherwise return the
11201 simplified form of the call as a tree.
11203 The simplified form may be a constant or other expression which
11204 computes the same value, but in a more efficient manner (including
11205 calls to other builtin functions).
11207 The call may contain arguments which need to be evaluated, but
11208 which are not useful to determine the result of the call. In
11209 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11210 COMPOUND_EXPR will be an argument which must be evaluated.
11211 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11212 COMPOUND_EXPR in the chain will contain the tree for the simplified
11213 form of the builtin function call. */
11215 static tree
11216 fold_builtin_strspn (tree s1, tree s2)
11218 if (!validate_arg (s1, POINTER_TYPE)
11219 || !validate_arg (s2, POINTER_TYPE))
11220 return NULL_TREE;
11221 else
11223 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11225 /* If both arguments are constants, evaluate at compile-time. */
11226 if (p1 && p2)
11228 const size_t r = strspn (p1, p2);
11229 return size_int (r);
11232 /* If either argument is "", return NULL_TREE. */
11233 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11234 /* Evaluate and ignore both arguments in case either one has
11235 side-effects. */
11236 return omit_two_operands (integer_type_node, integer_zero_node,
11237 s1, s2);
11238 return NULL_TREE;
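/* A hypothetical sketch (not taken from this file) of the strspn folds
   above: with two constant strings the result is a compile-time
   constant, and an empty string on either side always gives 0.  */
#include <assert.h>
#include <string.h>

static void
strspn_fold_example (void)
{
  assert (strspn ("4711x", "0123456789") == 4);          /* folds to 4.  */
  assert (strspn ("abc", "") == 0 && strspn ("", "abc") == 0);
}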
11242 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11243 to the call.
11245 Return NULL_TREE if no simplification was possible, otherwise return the
11246 simplified form of the call as a tree.
11248 The simplified form may be a constant or other expression which
11249 computes the same value, but in a more efficient manner (including
11250 calls to other builtin functions).
11252 The call may contain arguments which need to be evaluated, but
11253 which are not useful to determine the result of the call. In
11254 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11255 COMPOUND_EXPR will be an argument which must be evaluated.
11256 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11257 COMPOUND_EXPR in the chain will contain the tree for the simplified
11258 form of the builtin function call. */
11260 static tree
11261 fold_builtin_strcspn (tree s1, tree s2)
11263 if (!validate_arg (s1, POINTER_TYPE)
11264 || !validate_arg (s2, POINTER_TYPE))
11265 return NULL_TREE;
11266 else
11268 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11270 /* If both arguments are constants, evaluate at compile-time. */
11271 if (p1 && p2)
11273 const size_t r = strcspn (p1, p2);
11274 return size_int (r);
11277 /* If the first argument is "", return NULL_TREE. */
11278 if (p1 && *p1 == '\0')
11280 /* Evaluate and ignore argument s2 in case it has
11281 side-effects. */
11282 return omit_one_operand (integer_type_node,
11283 integer_zero_node, s2);
11286 /* If the second argument is "", return __builtin_strlen(s1). */
11287 if (p2 && *p2 == '\0')
11289 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11291 /* If the replacement _DECL isn't initialized, don't do the
11292 transformation. */
11293 if (!fn)
11294 return NULL_TREE;
11296 return build_call_expr (fn, 1, s1);
11298 return NULL_TREE;
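/* A hypothetical sketch (not taken from this file) of the strcspn folds
   above; the second special case uses the identity
   strcspn (s, "") == strlen (s), since an empty reject set only stops
   the scan at the terminating NUL.  */
#include <assert.h>
#include <string.h>

static void
strcspn_fold_example (void)
{
  const char *s = "hello";
  assert (strcspn (s, "") == strlen (s));   /* folded into strlen (s).  */
  assert (strcspn ("", "xyz") == 0);        /* empty S1 gives 0.  */
}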
11302 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11303 to the call. IGNORE is true if the value returned
11304 by the builtin will be ignored. UNLOCKED is true if this is
11305 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11306 the known length of the string. Return NULL_TREE if no simplification
11307 was possible. */
11309 tree
11310 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11312 /* If we're using an unlocked function, assume the other unlocked
11313 functions exist explicitly. */
11314 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11315 : implicit_built_in_decls[BUILT_IN_FPUTC];
11316 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11317 : implicit_built_in_decls[BUILT_IN_FWRITE];
11319 /* If the return value is used, don't do the transformation. */
11320 if (!ignore)
11321 return NULL_TREE;
11323 /* Verify the arguments in the original call. */
11324 if (!validate_arg (arg0, POINTER_TYPE)
11325 || !validate_arg (arg1, POINTER_TYPE))
11326 return NULL_TREE;
11328 if (! len)
11329 len = c_strlen (arg0, 0);
11331 /* Get the length of the string passed to fputs. If the length
11332 can't be determined, punt. */
11333 if (!len
11334 || TREE_CODE (len) != INTEGER_CST)
11335 return NULL_TREE;
11337 switch (compare_tree_int (len, 1))
11339 case -1: /* length is 0, delete the call entirely. */
11340 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11342 case 0: /* length is 1, call fputc. */
11344 const char *p = c_getstr (arg0);
11346 if (p != NULL)
11348 if (fn_fputc)
11349 return build_call_expr (fn_fputc, 2,
11350 build_int_cst (NULL_TREE, p[0]), arg1);
11351 else
11352 return NULL_TREE;
11355 /* FALLTHROUGH */
11356 case 1: /* length is greater than 1, call fwrite. */
11358 /* If optimizing for size keep fputs. */
11359 if (optimize_size)
11360 return NULL_TREE;
11361 /* New argument list transforming fputs(string, stream) to
11362 fwrite(string, 1, len, stream). */
11363 if (fn_fwrite)
11364 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11365 else
11366 return NULL_TREE;
11368 default:
11369 gcc_unreachable ();
11371 return NULL_TREE;
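/* A hypothetical user-level sketch (not taken from this file) of the
   fputs rewrites performed above when the return value is unused; FP is
   assumed to be a valid stream.  */
#include <stdio.h>

static void
fputs_fold_example (FILE *fp)
{
  fputs ("", fp);       /* length 0: the call is deleted entirely.  */
  fputs ("x", fp);      /* length 1: becomes fputc ('x', fp).  */
  fputs ("hello", fp);  /* length > 1: becomes fwrite ("hello", 1, 5, fp),
                           unless optimizing for size.  */
}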
11374 /* Fold the next_arg or va_start call EXP. Returns true if an error
11375 was produced, false otherwise. This is done so that we don't output
11376 the error or warning more than once. */
11377 bool
11378 fold_builtin_next_arg (tree exp, bool va_start_p)
11380 tree fntype = TREE_TYPE (current_function_decl);
11381 int nargs = call_expr_nargs (exp);
11382 tree arg;
11384 if (TYPE_ARG_TYPES (fntype) == 0
11385 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11386 == void_type_node))
11388 error ("%<va_start%> used in function with fixed args");
11389 return true;
11392 if (va_start_p)
11394 if (va_start_p && (nargs != 2))
11396 error ("wrong number of arguments to function %<va_start%>");
11397 return true;
11399 arg = CALL_EXPR_ARG (exp, 1);
11401 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11402 when we checked the arguments and if needed issued a warning. */
11403 else
11405 if (nargs == 0)
11407 /* Evidently an out of date version of <stdarg.h>; can't validate
11408 va_start's second argument, but can still work as intended. */
11409 warning (0, "%<__builtin_next_arg%> called without an argument");
11410 return true;
11412 else if (nargs > 1)
11414 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11415 return true;
11417 arg = CALL_EXPR_ARG (exp, 0);
11420 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11421 or __builtin_next_arg (0) the first time we see it, after checking
11422 the arguments and if needed issuing a warning. */
11423 if (!integer_zerop (arg))
11425 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11427 /* Strip off all nops for the sake of the comparison. This
11428 is not quite the same as STRIP_NOPS. It does more.
11429 We must also strip off INDIRECT_EXPR for C++ reference
11430 parameters. */
11431 while (TREE_CODE (arg) == NOP_EXPR
11432 || TREE_CODE (arg) == CONVERT_EXPR
11433 || TREE_CODE (arg) == INDIRECT_REF)
11434 arg = TREE_OPERAND (arg, 0);
11435 if (arg != last_parm)
11437 /* FIXME: Sometimes with the tree optimizers we can end up with
11438 something that is not the last argument even though the user
11439 used the last argument. We just warn and set the arg to be the
11440 last argument so that we will not get wrong code because of
11441 it. */
11442 warning (0, "second parameter of %<va_start%> not last named argument");
11444 /* We want to verify the second parameter just once before the tree
11445 optimizers are run and then avoid keeping it in the tree,
11446 as otherwise we could warn even for correct code like:
11447 void foo (int i, ...)
11448 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11449 if (va_start_p)
11450 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11451 else
11452 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11454 return false;
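/* A hypothetical example (not taken from this file) of the code shape
   the check above warns about: the second va_start argument is a named
   parameter, but not the last one.  */
#include <stdarg.h>

static int
next_arg_warning_example (int a, int b, ...)
{
  va_list ap;
  va_start (ap, a);   /* warned: second parameter of va_start is not the
                         last named argument (it should have been B).  */
  va_end (ap);
  return a + b;
}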
11458 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11459 ORIG may be null if this is a 2-argument call. We don't attempt to
11460 simplify calls with more than 3 arguments.
11462 Return NULL_TREE if no simplification was possible, otherwise return the
11463 simplified form of the call as a tree. If IGNORED is true, it means that
11464 the caller does not use the returned value of the function. */
11466 static tree
11467 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11469 tree call, retval;
11470 const char *fmt_str = NULL;
11472 /* Verify the required arguments in the original call. We deal with two
11473 types of sprintf() calls: 'sprintf (str, fmt)' and
11474 'sprintf (dest, "%s", orig)'. */
11475 if (!validate_arg (dest, POINTER_TYPE)
11476 || !validate_arg (fmt, POINTER_TYPE))
11477 return NULL_TREE;
11478 if (orig && !validate_arg (orig, POINTER_TYPE))
11479 return NULL_TREE;
11481 /* Check whether the format is a literal string constant. */
11482 fmt_str = c_getstr (fmt);
11483 if (fmt_str == NULL)
11484 return NULL_TREE;
11486 call = NULL_TREE;
11487 retval = NULL_TREE;
11489 if (!init_target_chars ())
11490 return NULL_TREE;
11492 /* If the format doesn't contain % args or %%, use strcpy. */
11493 if (strchr (fmt_str, target_percent) == NULL)
11495 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11497 if (!fn)
11498 return NULL_TREE;
11500 /* Don't optimize sprintf (buf, "abc", ptr++). */
11501 if (orig)
11502 return NULL_TREE;
11504 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11505 'format' is known to contain no % formats. */
11506 call = build_call_expr (fn, 2, dest, fmt);
11507 if (!ignored)
11508 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11511 /* If the format is "%s", use strcpy if the result isn't used. */
11512 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11514 tree fn;
11515 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11517 if (!fn)
11518 return NULL_TREE;
11520 /* Don't crash on sprintf (str1, "%s"). */
11521 if (!orig)
11522 return NULL_TREE;
11524 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11525 if (!ignored)
11527 retval = c_strlen (orig, 1);
11528 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11529 return NULL_TREE;
11531 call = build_call_expr (fn, 2, dest, orig);
11534 if (call && retval)
11536 retval = fold_convert
11537 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11538 retval);
11539 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11541 else
11542 return call;
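/* A hypothetical user-level sketch (not taken from this file) of the two
   sprintf shapes folded above; BUF is assumed large enough in both
   cases.  */
#include <stdio.h>

static int
sprintf_fold_example (char *buf, const char *s)
{
  int n = sprintf (buf, "abc");  /* no '%' in the format: becomes
                                    strcpy (buf, "abc"); the result folds to 3.  */
  sprintf (buf, "%s", s);        /* "%s" with the result unused: becomes
                                    strcpy (buf, s).  */
  return n;
}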
11545 /* Expand a call EXP to __builtin_object_size. */
11547 static rtx
11548 expand_builtin_object_size (tree exp)
11550 tree ost;
11551 int object_size_type;
11552 tree fndecl = get_callee_fndecl (exp);
11554 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11556 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11557 exp, fndecl);
11558 expand_builtin_trap ();
11559 return const0_rtx;
11562 ost = CALL_EXPR_ARG (exp, 1);
11563 STRIP_NOPS (ost);
11565 if (TREE_CODE (ost) != INTEGER_CST
11566 || tree_int_cst_sgn (ost) < 0
11567 || compare_tree_int (ost, 3) > 0)
11569 error ("%Klast argument of %D is not integer constant between 0 and 3",
11570 exp, fndecl);
11571 expand_builtin_trap ();
11572 return const0_rtx;
11575 object_size_type = tree_low_cst (ost, 0);
11577 return object_size_type < 2 ? constm1_rtx : const0_rtx;
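/* A hypothetical sketch (not taken from this file) of the documented
   __builtin_object_size behaviour that the fallback above implements:
   when the size cannot be determined, types 0 and 1 yield (size_t) -1
   and types 2 and 3 yield 0.  */
#include <stddef.h>

static void
object_size_example (char *unknown)
{
  char buf[32];
  size_t a = __builtin_object_size (buf, 0);      /* typically folds to 32.  */
  size_t b = __builtin_object_size (unknown, 0);  /* unknown: (size_t) -1.  */
  size_t c = __builtin_object_size (unknown, 2);  /* unknown: 0.  */
  (void) a; (void) b; (void) c;
}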
11580 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11581 FCODE is the BUILT_IN_* to use.
11582 Return NULL_RTX if we failed; the caller should emit a normal call,
11583 otherwise try to get the result in TARGET, if convenient (and in
11584 mode MODE if that's convenient). */
11586 static rtx
11587 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11588 enum built_in_function fcode)
11590 tree dest, src, len, size;
11592 if (!validate_arglist (exp,
11593 POINTER_TYPE,
11594 fcode == BUILT_IN_MEMSET_CHK
11595 ? INTEGER_TYPE : POINTER_TYPE,
11596 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11597 return NULL_RTX;
11599 dest = CALL_EXPR_ARG (exp, 0);
11600 src = CALL_EXPR_ARG (exp, 1);
11601 len = CALL_EXPR_ARG (exp, 2);
11602 size = CALL_EXPR_ARG (exp, 3);
11604 if (! host_integerp (size, 1))
11605 return NULL_RTX;
11607 if (host_integerp (len, 1) || integer_all_onesp (size))
11609 tree fn;
11611 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11613 warning (0, "%Kcall to %D will always overflow destination buffer",
11614 exp, get_callee_fndecl (exp));
11615 return NULL_RTX;
11618 fn = NULL_TREE;
11619 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11620 mem{cpy,pcpy,move,set} is available. */
11621 switch (fcode)
11623 case BUILT_IN_MEMCPY_CHK:
11624 fn = built_in_decls[BUILT_IN_MEMCPY];
11625 break;
11626 case BUILT_IN_MEMPCPY_CHK:
11627 fn = built_in_decls[BUILT_IN_MEMPCPY];
11628 break;
11629 case BUILT_IN_MEMMOVE_CHK:
11630 fn = built_in_decls[BUILT_IN_MEMMOVE];
11631 break;
11632 case BUILT_IN_MEMSET_CHK:
11633 fn = built_in_decls[BUILT_IN_MEMSET];
11634 break;
11635 default:
11636 break;
11639 if (! fn)
11640 return NULL_RTX;
11642 fn = build_call_expr (fn, 3, dest, src, len);
11643 STRIP_TYPE_NOPS (fn);
11644 while (TREE_CODE (fn) == COMPOUND_EXPR)
11646 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11647 EXPAND_NORMAL);
11648 fn = TREE_OPERAND (fn, 1);
11650 if (TREE_CODE (fn) == CALL_EXPR)
11651 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11652 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11654 else if (fcode == BUILT_IN_MEMSET_CHK)
11655 return NULL_RTX;
11656 else
11658 unsigned int dest_align
11659 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11661 /* If DEST is not a pointer type, call the normal function. */
11662 if (dest_align == 0)
11663 return NULL_RTX;
11665 /* If SRC and DEST are the same (and not volatile), do nothing. */
11666 if (operand_equal_p (src, dest, 0))
11668 tree expr;
11670 if (fcode != BUILT_IN_MEMPCPY_CHK)
11672 /* Evaluate and ignore LEN in case it has side-effects. */
11673 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11674 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11677 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11678 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11681 /* __memmove_chk special case. */
11682 if (fcode == BUILT_IN_MEMMOVE_CHK)
11684 unsigned int src_align
11685 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11687 if (src_align == 0)
11688 return NULL_RTX;
11690 /* If src is categorized for a readonly section we can use
11691 normal __memcpy_chk. */
11692 if (readonly_data_expr (src))
11694 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11695 if (!fn)
11696 return NULL_RTX;
11697 fn = build_call_expr (fn, 4, dest, src, len, size);
11698 STRIP_TYPE_NOPS (fn);
11699 while (TREE_CODE (fn) == COMPOUND_EXPR)
11701 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11702 EXPAND_NORMAL);
11703 fn = TREE_OPERAND (fn, 1);
11705 if (TREE_CODE (fn) == CALL_EXPR)
11706 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11707 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11710 return NULL_RTX;
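/* A hypothetical example (not taken from this file) of the kind of
   fortified call the expander above handles: when the constant length
   fits in the object size the checked call is lowered to plain memcpy,
   while a length that provably exceeds it triggers the "will always
   overflow" warning and keeps the library call.  */
static void
memcpy_chk_example (char *src)
{
  char buf[16];
  /* len 8 <= __builtin_object_size (buf, 0) == 16: lowered to memcpy.  */
  __builtin___memcpy_chk (buf, src, 8, __builtin_object_size (buf, 0));
}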
11714 /* Emit warning if a buffer overflow is detected at compile time. */
11716 static void
11717 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11719 int is_strlen = 0;
11720 tree len, size;
11722 switch (fcode)
11724 case BUILT_IN_STRCPY_CHK:
11725 case BUILT_IN_STPCPY_CHK:
11726 /* For __strcat_chk the warning will be emitted only if overflowing
11727 by at least strlen (dest) + 1 bytes. */
11728 case BUILT_IN_STRCAT_CHK:
11729 len = CALL_EXPR_ARG (exp, 1);
11730 size = CALL_EXPR_ARG (exp, 2);
11731 is_strlen = 1;
11732 break;
11733 case BUILT_IN_STRNCAT_CHK:
11734 case BUILT_IN_STRNCPY_CHK:
11735 len = CALL_EXPR_ARG (exp, 2);
11736 size = CALL_EXPR_ARG (exp, 3);
11737 break;
11738 case BUILT_IN_SNPRINTF_CHK:
11739 case BUILT_IN_VSNPRINTF_CHK:
11740 len = CALL_EXPR_ARG (exp, 1);
11741 size = CALL_EXPR_ARG (exp, 3);
11742 break;
11743 default:
11744 gcc_unreachable ();
11747 if (!len || !size)
11748 return;
11750 if (! host_integerp (size, 1) || integer_all_onesp (size))
11751 return;
11753 if (is_strlen)
11755 len = c_strlen (len, 1);
11756 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11757 return;
11759 else if (fcode == BUILT_IN_STRNCAT_CHK)
11761 tree src = CALL_EXPR_ARG (exp, 1);
11762 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11763 return;
11764 src = c_strlen (src, 1);
11765 if (! src || ! host_integerp (src, 1))
11767 warning (0, "%Kcall to %D might overflow destination buffer",
11768 exp, get_callee_fndecl (exp));
11769 return;
11771 else if (tree_int_cst_lt (src, size))
11772 return;
11774 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11775 return;
11777 warning (0, "%Kcall to %D will always overflow destination buffer",
11778 exp, get_callee_fndecl (exp));
11781 /* Emit warning if a buffer overflow is detected at compile time
11782 in __sprintf_chk/__vsprintf_chk calls. */
11784 static void
11785 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11787 tree dest, size, len, fmt, flag;
11788 const char *fmt_str;
11789 int nargs = call_expr_nargs (exp);
11791 /* Verify the required arguments in the original call. */
11793 if (nargs < 4)
11794 return;
11795 dest = CALL_EXPR_ARG (exp, 0);
11796 flag = CALL_EXPR_ARG (exp, 1);
11797 size = CALL_EXPR_ARG (exp, 2);
11798 fmt = CALL_EXPR_ARG (exp, 3);
11800 if (! host_integerp (size, 1) || integer_all_onesp (size))
11801 return;
11803 /* Check whether the format is a literal string constant. */
11804 fmt_str = c_getstr (fmt);
11805 if (fmt_str == NULL)
11806 return;
11808 if (!init_target_chars ())
11809 return;
11811 /* If the format doesn't contain % args or %%, we know its size. */
11812 if (strchr (fmt_str, target_percent) == 0)
11813 len = build_int_cstu (size_type_node, strlen (fmt_str));
11814 /* If the format is "%s" and first ... argument is a string literal,
11815 we know it too. */
11816 else if (fcode == BUILT_IN_SPRINTF_CHK
11817 && strcmp (fmt_str, target_percent_s) == 0)
11819 tree arg;
11821 if (nargs < 5)
11822 return;
11823 arg = CALL_EXPR_ARG (exp, 4);
11824 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11825 return;
11827 len = c_strlen (arg, 1);
11828 if (!len || ! host_integerp (len, 1))
11829 return;
11831 else
11832 return;
11834 if (! tree_int_cst_lt (len, size))
11836 warning (0, "%Kcall to %D will always overflow destination buffer",
11837 exp, get_callee_fndecl (exp));
11841 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11842 if possible. */
11844 tree
11845 fold_builtin_object_size (tree ptr, tree ost)
11847 tree ret = NULL_TREE;
11848 int object_size_type;
11850 if (!validate_arg (ptr, POINTER_TYPE)
11851 || !validate_arg (ost, INTEGER_TYPE))
11852 return NULL_TREE;
11854 STRIP_NOPS (ost);
11856 if (TREE_CODE (ost) != INTEGER_CST
11857 || tree_int_cst_sgn (ost) < 0
11858 || compare_tree_int (ost, 3) > 0)
11859 return NULL_TREE;
11861 object_size_type = tree_low_cst (ost, 0);
11863 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11864 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11865 and (size_t) 0 for types 2 and 3. */
11866 if (TREE_SIDE_EFFECTS (ptr))
11867 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11869 if (TREE_CODE (ptr) == ADDR_EXPR)
11870 ret = build_int_cstu (size_type_node,
11871 compute_builtin_object_size (ptr, object_size_type));
11873 else if (TREE_CODE (ptr) == SSA_NAME)
11875 unsigned HOST_WIDE_INT bytes;
11877 /* If object size is not known yet, delay folding until
11878 later. Maybe subsequent passes will help determining
11879 it. */
11880 bytes = compute_builtin_object_size (ptr, object_size_type);
11881 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11882 ? -1 : 0))
11883 ret = build_int_cstu (size_type_node, bytes);
11886 if (ret)
11888 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11889 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11890 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11891 ret = NULL_TREE;
11894 return ret;
11897 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11898 DEST, SRC, LEN, and SIZE are the arguments to the call.
11899 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
11900 code of the builtin. If MAXLEN is not NULL, it is the maximum length
11901 passed as the third argument. */
11903 tree
11904 fold_builtin_memory_chk (tree fndecl,
11905 tree dest, tree src, tree len, tree size,
11906 tree maxlen, bool ignore,
11907 enum built_in_function fcode)
11909 tree fn;
11911 if (!validate_arg (dest, POINTER_TYPE)
11912 || !validate_arg (src,
11913 (fcode == BUILT_IN_MEMSET_CHK
11914 ? INTEGER_TYPE : POINTER_TYPE))
11915 || !validate_arg (len, INTEGER_TYPE)
11916 || !validate_arg (size, INTEGER_TYPE))
11917 return NULL_TREE;
11919 /* If SRC and DEST are the same (and not volatile), return DEST
11920 (resp. DEST+LEN for __mempcpy_chk). */
11921 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11923 if (fcode != BUILT_IN_MEMPCPY_CHK)
11924 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11925 else
11927 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11928 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11932 if (! host_integerp (size, 1))
11933 return NULL_TREE;
11935 if (! integer_all_onesp (size))
11937 if (! host_integerp (len, 1))
11939 /* If LEN is not constant, try MAXLEN too.
11940 For MAXLEN only allow optimizing into non-_ocs function
11941 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11942 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11944 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11946 /* (void) __mempcpy_chk () can be optimized into
11947 (void) __memcpy_chk (). */
11948 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11949 if (!fn)
11950 return NULL_TREE;
11952 return build_call_expr (fn, 4, dest, src, len, size);
11954 return NULL_TREE;
11957 else
11958 maxlen = len;
11960 if (tree_int_cst_lt (size, maxlen))
11961 return NULL_TREE;
11964 fn = NULL_TREE;
11965 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11966 mem{cpy,pcpy,move,set} is available. */
11967 switch (fcode)
11969 case BUILT_IN_MEMCPY_CHK:
11970 fn = built_in_decls[BUILT_IN_MEMCPY];
11971 break;
11972 case BUILT_IN_MEMPCPY_CHK:
11973 fn = built_in_decls[BUILT_IN_MEMPCPY];
11974 break;
11975 case BUILT_IN_MEMMOVE_CHK:
11976 fn = built_in_decls[BUILT_IN_MEMMOVE];
11977 break;
11978 case BUILT_IN_MEMSET_CHK:
11979 fn = built_in_decls[BUILT_IN_MEMSET];
11980 break;
11981 default:
11982 break;
11985 if (!fn)
11986 return NULL_TREE;
11988 return build_call_expr (fn, 3, dest, src, len);
11991 /* Fold a call to the __st[rp]cpy_chk builtin.
11992 DEST, SRC, and SIZE are the arguments to the call.
11993 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
11994 code of the builtin. If MAXLEN is not NULL, it is the maximum length of
11995 the string passed as the second argument. */
11997 tree
11998 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
11999 tree maxlen, bool ignore,
12000 enum built_in_function fcode)
12002 tree len, fn;
12004 if (!validate_arg (dest, POINTER_TYPE)
12005 || !validate_arg (src, POINTER_TYPE)
12006 || !validate_arg (size, INTEGER_TYPE))
12007 return NULL_TREE;
12009 /* If SRC and DEST are the same (and not volatile), return DEST. */
12010 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12011 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12013 if (! host_integerp (size, 1))
12014 return NULL_TREE;
12016 if (! integer_all_onesp (size))
12018 len = c_strlen (src, 1);
12019 if (! len || ! host_integerp (len, 1))
12021 /* If LEN is not constant, try MAXLEN too.
12022 For MAXLEN only allow optimizing into non-_ocs function
12023 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12024 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12026 if (fcode == BUILT_IN_STPCPY_CHK)
12028 if (! ignore)
12029 return NULL_TREE;
12031 /* If return value of __stpcpy_chk is ignored,
12032 optimize into __strcpy_chk. */
12033 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12034 if (!fn)
12035 return NULL_TREE;
12037 return build_call_expr (fn, 3, dest, src, size);
12040 if (! len || TREE_SIDE_EFFECTS (len))
12041 return NULL_TREE;
12043 /* If c_strlen returned something, but not a constant,
12044 transform __strcpy_chk into __memcpy_chk. */
12045 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12046 if (!fn)
12047 return NULL_TREE;
12049 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12050 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12051 build_call_expr (fn, 4,
12052 dest, src, len, size));
12055 else
12056 maxlen = len;
12058 if (! tree_int_cst_lt (maxlen, size))
12059 return NULL_TREE;
12062 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12063 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12064 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12065 if (!fn)
12066 return NULL_TREE;
12068 return build_call_expr (fn, 2, dest, src);
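/* A hypothetical example (not taken from this file) of the fold above: a
   constant source whose length is known to fit in the destination lets
   the checked copy become a plain strcpy.  */
static void
strcpy_chk_example (void)
{
  char buf[16];
  /* strlen ("hi") < 16, so this can be folded to strcpy (buf, "hi").  */
  __builtin___strcpy_chk (buf, "hi", __builtin_object_size (buf, 0));
}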
12071 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12072 are the arguments to the call. If MAXLEN is not NULL, it is the maximum
12073 length passed as the third argument. */
12075 tree
12076 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12077 tree maxlen)
12079 tree fn;
12081 if (!validate_arg (dest, POINTER_TYPE)
12082 || !validate_arg (src, POINTER_TYPE)
12083 || !validate_arg (len, INTEGER_TYPE)
12084 || !validate_arg (size, INTEGER_TYPE))
12085 return NULL_TREE;
12087 if (! host_integerp (size, 1))
12088 return NULL_TREE;
12090 if (! integer_all_onesp (size))
12092 if (! host_integerp (len, 1))
12094 /* If LEN is not constant, try MAXLEN too.
12095 For MAXLEN only allow optimizing into non-_ocs function
12096 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12097 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12098 return NULL_TREE;
12100 else
12101 maxlen = len;
12103 if (tree_int_cst_lt (size, maxlen))
12104 return NULL_TREE;
12107 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12108 fn = built_in_decls[BUILT_IN_STRNCPY];
12109 if (!fn)
12110 return NULL_TREE;
12112 return build_call_expr (fn, 3, dest, src, len);
12115 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12116 are the arguments to the call. */
12118 static tree
12119 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12121 tree fn;
12122 const char *p;
12124 if (!validate_arg (dest, POINTER_TYPE)
12125 || !validate_arg (src, POINTER_TYPE)
12126 || !validate_arg (size, INTEGER_TYPE))
12127 return NULL_TREE;
12129 p = c_getstr (src);
12130 /* If the SRC parameter is "", return DEST. */
12131 if (p && *p == '\0')
12132 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12134 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12135 return NULL_TREE;
12137 /* If __builtin_strcat_chk is used, assume strcat is available. */
12138 fn = built_in_decls[BUILT_IN_STRCAT];
12139 if (!fn)
12140 return NULL_TREE;
12142 return build_call_expr (fn, 2, dest, src);
12145 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12146 LEN, and SIZE. */
12148 static tree
12149 fold_builtin_strncat_chk (tree fndecl,
12150 tree dest, tree src, tree len, tree size)
12152 tree fn;
12153 const char *p;
12155 if (!validate_arg (dest, POINTER_TYPE)
12156 || !validate_arg (src, POINTER_TYPE)
12157 || !validate_arg (len, INTEGER_TYPE)
12158 || !validate_arg (size, INTEGER_TYPE))
12159 return NULL_TREE;
12161 p = c_getstr (src);
12162 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12163 if (p && *p == '\0')
12164 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12165 else if (integer_zerop (len))
12166 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12168 if (! host_integerp (size, 1))
12169 return NULL_TREE;
12171 if (! integer_all_onesp (size))
12173 tree src_len = c_strlen (src, 1);
12174 if (src_len
12175 && host_integerp (src_len, 1)
12176 && host_integerp (len, 1)
12177 && ! tree_int_cst_lt (len, src_len))
12179 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12180 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12181 if (!fn)
12182 return NULL_TREE;
12184 return build_call_expr (fn, 3, dest, src, size);
12186 return NULL_TREE;
12189 /* If __builtin_strncat_chk is used, assume strncat is available. */
12190 fn = built_in_decls[BUILT_IN_STRNCAT];
12191 if (!fn)
12192 return NULL_TREE;
12194 return build_call_expr (fn, 3, dest, src, len);
12197 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12198 a normal call should be emitted rather than expanding the function
12199 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12201 static tree
12202 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12204 tree dest, size, len, fn, fmt, flag;
12205 const char *fmt_str;
12206 int nargs = call_expr_nargs (exp);
12208 /* Verify the required arguments in the original call. */
12209 if (nargs < 4)
12210 return NULL_TREE;
12211 dest = CALL_EXPR_ARG (exp, 0);
12212 if (!validate_arg (dest, POINTER_TYPE))
12213 return NULL_TREE;
12214 flag = CALL_EXPR_ARG (exp, 1);
12215 if (!validate_arg (flag, INTEGER_TYPE))
12216 return NULL_TREE;
12217 size = CALL_EXPR_ARG (exp, 2);
12218 if (!validate_arg (size, INTEGER_TYPE))
12219 return NULL_TREE;
12220 fmt = CALL_EXPR_ARG (exp, 3);
12221 if (!validate_arg (fmt, POINTER_TYPE))
12222 return NULL_TREE;
12224 if (! host_integerp (size, 1))
12225 return NULL_TREE;
12227 len = NULL_TREE;
12229 if (!init_target_chars ())
12230 return NULL_TREE;
12232 /* Check whether the format is a literal string constant. */
12233 fmt_str = c_getstr (fmt);
12234 if (fmt_str != NULL)
12236 /* If the format doesn't contain % args or %%, we know the size. */
12237 if (strchr (fmt_str, target_percent) == 0)
12239 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12240 len = build_int_cstu (size_type_node, strlen (fmt_str));
12242 /* If the format is "%s" and first ... argument is a string literal,
12243 we know the size too. */
12244 else if (fcode == BUILT_IN_SPRINTF_CHK
12245 && strcmp (fmt_str, target_percent_s) == 0)
12247 tree arg;
12249 if (nargs == 5)
12251 arg = CALL_EXPR_ARG (exp, 4);
12252 if (validate_arg (arg, POINTER_TYPE))
12254 len = c_strlen (arg, 1);
12255 if (! len || ! host_integerp (len, 1))
12256 len = NULL_TREE;
12262 if (! integer_all_onesp (size))
12264 if (! len || ! tree_int_cst_lt (len, size))
12265 return NULL_TREE;
12268 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12269 or if format doesn't contain % chars or is "%s". */
12270 if (! integer_zerop (flag))
12272 if (fmt_str == NULL)
12273 return NULL_TREE;
12274 if (strchr (fmt_str, target_percent) != NULL
12275 && strcmp (fmt_str, target_percent_s))
12276 return NULL_TREE;
12279 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12280 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12281 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12282 if (!fn)
12283 return NULL_TREE;
12285 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
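/* A hypothetical example (not taken from this file) of a fortified call
   that the fold above turns back into plain sprintf: the flag is 0 and
   the format contains no '%' characters.  */
static void
sprintf_chk_example (char *buf)
{
  __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 1), "abc");
}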
12288 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
12289 a normal call should be emitted rather than expanding the function
12290 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12291 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
12292 passed as the second argument. */
12294 tree
12295 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12296 enum built_in_function fcode)
12298 tree dest, size, len, fn, fmt, flag;
12299 const char *fmt_str;
12301 /* Verify the required arguments in the original call. */
12302 if (call_expr_nargs (exp) < 5)
12303 return NULL_TREE;
12304 dest = CALL_EXPR_ARG (exp, 0);
12305 if (!validate_arg (dest, POINTER_TYPE))
12306 return NULL_TREE;
12307 len = CALL_EXPR_ARG (exp, 1);
12308 if (!validate_arg (len, INTEGER_TYPE))
12309 return NULL_TREE;
12310 flag = CALL_EXPR_ARG (exp, 2);
12311 if (!validate_arg (flag, INTEGER_TYPE))
12312 return NULL_TREE;
12313 size = CALL_EXPR_ARG (exp, 3);
12314 if (!validate_arg (size, INTEGER_TYPE))
12315 return NULL_TREE;
12316 fmt = CALL_EXPR_ARG (exp, 4);
12317 if (!validate_arg (fmt, POINTER_TYPE))
12318 return NULL_TREE;
12320 if (! host_integerp (size, 1))
12321 return NULL_TREE;
12323 if (! integer_all_onesp (size))
12325 if (! host_integerp (len, 1))
12327 /* If LEN is not constant, try MAXLEN too.
12328 For MAXLEN only allow optimizing into non-_ocs function
12329 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12330 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12331 return NULL_TREE;
12333 else
12334 maxlen = len;
12336 if (tree_int_cst_lt (size, maxlen))
12337 return NULL_TREE;
12340 if (!init_target_chars ())
12341 return NULL_TREE;
12343 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12344 or if format doesn't contain % chars or is "%s". */
12345 if (! integer_zerop (flag))
12347 fmt_str = c_getstr (fmt);
12348 if (fmt_str == NULL)
12349 return NULL_TREE;
12350 if (strchr (fmt_str, target_percent) != NULL
12351 && strcmp (fmt_str, target_percent_s))
12352 return NULL_TREE;
12355 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12356 available. */
12357 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12358 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12359 if (!fn)
12360 return NULL_TREE;
12362 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12365 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12366 FMT and ARG are the arguments to the call; we don't fold cases with
12367 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12369 Return NULL_TREE if no simplification was possible, otherwise return the
12370 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12371 code of the function to be simplified. */
12373 static tree
12374 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12375 enum built_in_function fcode)
12377 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12378 const char *fmt_str = NULL;
12380 /* If the return value is used, don't do the transformation. */
12381 if (! ignore)
12382 return NULL_TREE;
12384 /* Verify the required arguments in the original call. */
12385 if (!validate_arg (fmt, POINTER_TYPE))
12386 return NULL_TREE;
12388 /* Check whether the format is a literal string constant. */
12389 fmt_str = c_getstr (fmt);
12390 if (fmt_str == NULL)
12391 return NULL_TREE;
12393 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12395 /* If we're using an unlocked function, assume the other
12396 unlocked functions exist explicitly. */
12397 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12398 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12400 else
12402 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12403 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12406 if (!init_target_chars ())
12407 return NULL_TREE;
12409 if (strcmp (fmt_str, target_percent_s) == 0
12410 || strchr (fmt_str, target_percent) == NULL)
12412 const char *str;
12414 if (strcmp (fmt_str, target_percent_s) == 0)
12416 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12417 return NULL_TREE;
12419 if (!arg || !validate_arg (arg, POINTER_TYPE))
12420 return NULL_TREE;
12422 str = c_getstr (arg);
12423 if (str == NULL)
12424 return NULL_TREE;
12426 else
12428 /* The format specifier doesn't contain any '%' characters. */
12429 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12430 && arg)
12431 return NULL_TREE;
12432 str = fmt_str;
12435 /* If the string was "", printf does nothing. */
12436 if (str[0] == '\0')
12437 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12439 /* If the string has length of 1, call putchar. */
12440 if (str[1] == '\0')
12442 /* Given printf("c"), (where c is any one character,)
12443 convert "c"[0] to an int and pass that to the replacement
12444 function. */
12445 newarg = build_int_cst (NULL_TREE, str[0]);
12446 if (fn_putchar)
12447 call = build_call_expr (fn_putchar, 1, newarg);
12449 else
12451 /* If the string was "string\n", call puts("string"). */
12452 size_t len = strlen (str);
12453 if ((unsigned char)str[len - 1] == target_newline)
12455 /* Create a NUL-terminated string that's one char shorter
12456 than the original, stripping off the trailing '\n'. */
12457 char *newstr = alloca (len);
12458 memcpy (newstr, str, len - 1);
12459 newstr[len - 1] = 0;
12461 newarg = build_string_literal (len, newstr);
12462 if (fn_puts)
12463 call = build_call_expr (fn_puts, 1, newarg);
12465 else
12466 /* We'd like to arrange to call fputs(string,stdout) here,
12467 but we need stdout and don't have a way to get it yet. */
12468 return NULL_TREE;
12472 /* The other optimizations can be done only on the non-va_list variants. */
12473 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12474 return NULL_TREE;
12476 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12477 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12479 if (!arg || !validate_arg (arg, POINTER_TYPE))
12480 return NULL_TREE;
12481 if (fn_puts)
12482 call = build_call_expr (fn_puts, 1, arg);
12485 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12486 else if (strcmp (fmt_str, target_percent_c) == 0)
12488 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12489 return NULL_TREE;
12490 if (fn_putchar)
12491 call = build_call_expr (fn_putchar, 1, arg);
12494 if (!call)
12495 return NULL_TREE;
12497 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
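/* A hypothetical user-level sketch (not taken from this file) of the
   printf rewrites above, all with the return value unused.  */
#include <stdio.h>

static void
printf_fold_example (const char *s, int c)
{
  printf ("");          /* empty format: the call is removed.  */
  printf ("x");         /* one character: becomes putchar ('x').  */
  printf ("hello\n");   /* literal ending in '\n': becomes puts ("hello").  */
  printf ("%s\n", s);   /* "%s\n": becomes puts (s).  */
  printf ("%c", c);     /* "%c": becomes putchar (c).  */
}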
12500 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12501 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12502 more than 3 arguments, and ARG may be null in the 2-argument case.
12504 Return NULL_TREE if no simplification was possible, otherwise return the
12505 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12506 code of the function to be simplified. */
12508 static tree
12509 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12510 enum built_in_function fcode)
12512 tree fn_fputc, fn_fputs, call = NULL_TREE;
12513 const char *fmt_str = NULL;
12515 /* If the return value is used, don't do the transformation. */
12516 if (! ignore)
12517 return NULL_TREE;
12519 /* Verify the required arguments in the original call. */
12520 if (!validate_arg (fp, POINTER_TYPE))
12521 return NULL_TREE;
12522 if (!validate_arg (fmt, POINTER_TYPE))
12523 return NULL_TREE;
12525 /* Check whether the format is a literal string constant. */
12526 fmt_str = c_getstr (fmt);
12527 if (fmt_str == NULL)
12528 return NULL_TREE;
12530 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12532 /* If we're using an unlocked function, assume the other
12533 unlocked functions exist explicitly. */
12534 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12535 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12537 else
12539 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12540 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12543 if (!init_target_chars ())
12544 return NULL_TREE;
12546 /* If the format doesn't contain % args or %%, use strcpy. */
12547 if (strchr (fmt_str, target_percent) == NULL)
12549 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12550 && arg)
12551 return NULL_TREE;
12553 /* If the format specifier was "", fprintf does nothing. */
12554 if (fmt_str[0] == '\0')
12556 /* If FP has side-effects, just wait until gimplification is
12557 done. */
12558 if (TREE_SIDE_EFFECTS (fp))
12559 return NULL_TREE;
12561 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12564 /* When "string" doesn't contain %, replace all cases of
12565 fprintf (fp, string) with fputs (string, fp). The fputs
12566 builtin will take care of special cases like length == 1. */
12567 if (fn_fputs)
12568 call = build_call_expr (fn_fputs, 2, fmt, fp);
12571 /* The other optimizations can be done only on the non-va_list variants. */
12572 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12573 return NULL_TREE;
12575 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12576 else if (strcmp (fmt_str, target_percent_s) == 0)
12578 if (!arg || !validate_arg (arg, POINTER_TYPE))
12579 return NULL_TREE;
12580 if (fn_fputs)
12581 call = build_call_expr (fn_fputs, 2, arg, fp);
12584 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12585 else if (strcmp (fmt_str, target_percent_c) == 0)
12587 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12588 return NULL_TREE;
12589 if (fn_fputc)
12590 call = build_call_expr (fn_fputc, 2, arg, fp);
12593 if (!call)
12594 return NULL_TREE;
12595 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
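/* A hypothetical user-level sketch (not taken from this file) of the
   corresponding fprintf rewrites, with the return value unused and FP a
   valid stream.  */
#include <stdio.h>

static void
fprintf_fold_example (FILE *fp, const char *s, int c)
{
  fprintf (fp, "");       /* empty format: the call is removed.  */
  fprintf (fp, "hello");  /* no '%': becomes fputs ("hello", fp).  */
  fprintf (fp, "%s", s);  /* "%s": becomes fputs (s, fp).  */
  fprintf (fp, "%c", c);  /* "%c": becomes fputc (c, fp).  */
}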
12598 /* Initialize format string characters in the target charset. */
12600 static bool
12601 init_target_chars (void)
12603 static bool init;
12604 if (!init)
12606 target_newline = lang_hooks.to_target_charset ('\n');
12607 target_percent = lang_hooks.to_target_charset ('%');
12608 target_c = lang_hooks.to_target_charset ('c');
12609 target_s = lang_hooks.to_target_charset ('s');
12610 if (target_newline == 0 || target_percent == 0 || target_c == 0
12611 || target_s == 0)
12612 return false;
12614 target_percent_c[0] = target_percent;
12615 target_percent_c[1] = target_c;
12616 target_percent_c[2] = '\0';
12618 target_percent_s[0] = target_percent;
12619 target_percent_s[1] = target_s;
12620 target_percent_s[2] = '\0';
12622 target_percent_s_newline[0] = target_percent;
12623 target_percent_s_newline[1] = target_s;
12624 target_percent_s_newline[2] = target_newline;
12625 target_percent_s_newline[3] = '\0';
12627 init = true;
12629 return true;
12632 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12633 and no overflow/underflow occurred. INEXACT is true if M was not
12634 exactly calculated. TYPE is the tree type for the result. This
12635 function assumes that you cleared the MPFR flags and then
12636 calculated M to see if anything subsequently set a flag prior to
12637 entering this function. Return NULL_TREE if any checks fail. */
12639 static tree
12640 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12642 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12643 overflow/underflow occurred. If -frounding-math, proceed iff the
12644 result of calling FUNC was exact. */
12645 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12646 && (!flag_rounding_math || !inexact))
12648 REAL_VALUE_TYPE rr;
12650 real_from_mpfr (&rr, m, type, GMP_RNDN);
12651 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12652 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12653 but the mpfr_t is not, then we underflowed in the
12654 conversion. */
12655 if (real_isfinite (&rr)
12656 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12658 REAL_VALUE_TYPE rmode;
12660 real_convert (&rmode, TYPE_MODE (type), &rr);
12661 /* Proceed iff the specified mode can hold the value. */
12662 if (real_identical (&rmode, &rr))
12663 return build_real (type, rmode);
12666 return NULL_TREE;
12669 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12670 FUNC on it and return the resulting value as a tree with type TYPE.
12671 If MIN and/or MAX are not NULL, then the supplied ARG must be
12672 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12673 acceptable values, otherwise they are not. The mpfr precision is
12674 set to the precision of TYPE. We assume that function FUNC returns
12675 zero if the result could be calculated exactly within the requested
12676 precision. */
12678 static tree
12679 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12680 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12681 bool inclusive)
12683 tree result = NULL_TREE;
12685 STRIP_NOPS (arg);
12687 /* To proceed, MPFR must exactly represent the target floating point
12688 format, which only happens when the target base equals two. */
12689 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12690 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12692 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12694 if (real_isfinite (ra)
12695 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12696 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12698 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12699 int inexact;
12700 mpfr_t m;
12702 mpfr_init2 (m, prec);
12703 mpfr_from_real (m, ra, GMP_RNDN);
12704 mpfr_clear_flags ();
12705 inexact = func (m, m, GMP_RNDN);
12706 result = do_mpfr_ckconv (m, type, inexact);
12707 mpfr_clear (m);
12711 return result;
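/* A hypothetical standalone sketch (not taken from this file) of the
   MPFR sequence that do_mpfr_arg1 wraps for one-argument functions:
   evaluate the function at the argument in the target precision and
   accept the result only if it is a normal number with no
   overflow/underflow.  Link with -lmpfr -lgmp.  */
#include <mpfr.h>

static int
mpfr_fold_example (double x, double *res)
{
  mpfr_t m;
  int ok = 0;

  mpfr_init2 (m, 53);                  /* precision of IEEE double.  */
  mpfr_set_d (m, x, GMP_RNDN);
  mpfr_clear_flags ();
  (void) mpfr_sin (m, m, GMP_RNDN);    /* the inexact flag also matters
                                          under -frounding-math.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ())
    {
      *res = mpfr_get_d (m, GMP_RNDN);
      ok = 1;
    }
  mpfr_clear (m);
  return ok;
}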
12714 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12715 FUNC on it and return the resulting value as a tree with type TYPE.
12716 The mpfr precision is set to the precision of TYPE. We assume that
12717 function FUNC returns zero if the result could be calculated
12718 exactly within the requested precision. */
12720 static tree
12721 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12722 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12724 tree result = NULL_TREE;
12726 STRIP_NOPS (arg1);
12727 STRIP_NOPS (arg2);
12729 /* To proceed, MPFR must exactly represent the target floating point
12730 format, which only happens when the target base equals two. */
12731 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12732 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12733 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12735 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12736 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12738 if (real_isfinite (ra1) && real_isfinite (ra2))
12740 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12741 int inexact;
12742 mpfr_t m1, m2;
12744 mpfr_inits2 (prec, m1, m2, NULL);
12745 mpfr_from_real (m1, ra1, GMP_RNDN);
12746 mpfr_from_real (m2, ra2, GMP_RNDN);
12747 mpfr_clear_flags ();
12748 inexact = func (m1, m1, m2, GMP_RNDN);
12749 result = do_mpfr_ckconv (m1, type, inexact);
12750 mpfr_clears (m1, m2, NULL);
12754 return result;
12757 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12758 FUNC on it and return the resulting value as a tree with type TYPE.
12759 The mpfr precision is set to the precision of TYPE. We assume that
12760 function FUNC returns zero if the result could be calculated
12761 exactly within the requested precision. */
12763 static tree
12764 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12765 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12767 tree result = NULL_TREE;
12769 STRIP_NOPS (arg1);
12770 STRIP_NOPS (arg2);
12771 STRIP_NOPS (arg3);
12773 /* To proceed, MPFR must exactly represent the target floating point
12774 format, which only happens when the target base equals two. */
12775 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12776 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12777 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12778 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12780 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12781 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12782 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12784 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12786 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12787 int inexact;
12788 mpfr_t m1, m2, m3;
12790 mpfr_inits2 (prec, m1, m2, m3, NULL);
12791 mpfr_from_real (m1, ra1, GMP_RNDN);
12792 mpfr_from_real (m2, ra2, GMP_RNDN);
12793 mpfr_from_real (m3, ra3, GMP_RNDN);
12794 mpfr_clear_flags ();
12795 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12796 result = do_mpfr_ckconv (m1, type, inexact);
12797 mpfr_clears (m1, m2, m3, NULL);
12801 return result;
12804 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12805 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12806 If ARG_SINP and ARG_COSP are NULL then the result is returned
12807 as a complex value.
12808 The type is taken from the type of ARG and is used for setting the
12809 precision of the calculation and results. */
12811 static tree
12812 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12814 tree const type = TREE_TYPE (arg);
12815 tree result = NULL_TREE;
12817 STRIP_NOPS (arg);
12819 /* To proceed, MPFR must exactly represent the target floating point
12820 format, which only happens when the target base equals two. */
12821 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12822 && TREE_CODE (arg) == REAL_CST
12823 && !TREE_OVERFLOW (arg))
12825 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12827 if (real_isfinite (ra))
12829 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12830 tree result_s, result_c;
12831 int inexact;
12832 mpfr_t m, ms, mc;
12834 mpfr_inits2 (prec, m, ms, mc, NULL);
12835 mpfr_from_real (m, ra, GMP_RNDN);
12836 mpfr_clear_flags ();
12837 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12838 result_s = do_mpfr_ckconv (ms, type, inexact);
12839 result_c = do_mpfr_ckconv (mc, type, inexact);
12840 mpfr_clears (m, ms, mc, NULL);
12841 if (result_s && result_c)
12843 /* If we are to return in a complex value do so. */
12844 if (!arg_sinp && !arg_cosp)
12845 return build_complex (build_complex_type (type),
12846 result_c, result_s);
12848 /* Dereference the sin/cos pointer arguments. */
12849 arg_sinp = build_fold_indirect_ref (arg_sinp);
12850 arg_cosp = build_fold_indirect_ref (arg_cosp);
12851 /* Proceed iff valid pointer types were passed in. */
12852 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12853 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12855 /* Set the values. */
12856 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12857 result_s);
12858 TREE_SIDE_EFFECTS (result_s) = 1;
12859 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12860 result_c);
12861 TREE_SIDE_EFFECTS (result_c) = 1;
12862 /* Combine the assignments into a compound expr. */
12863 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12864 result_s, result_c));
12869 return result;
#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long) n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
        {
          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, GMP_RNDN);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
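/* FUNC here would typically be mpfr_jn or mpfr_yn when folding jn()/yn()
   with constant arguments.  A rough standalone sketch of the same call
   (assuming MPFR >= 2.3.0, an IEEE binary64 target so 53 bits of
   precision, and a host double in place of the REAL_CST; the helper
   name is made up for illustration):  */
#if 0
#include <stdio.h>
#include <mpfr.h>

static void
bessel_fold_sketch (long n, double x)
{
  mpfr_t m;
  int inexact;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, x, GMP_RNDN);
  mpfr_clear_flags ();
  inexact = mpfr_jn (m, n, m, GMP_RNDN);  /* computed in place, as above */
  printf ("jn (%ld, %g) = %.17g (inexact = %d)\n",
          n, x, mpfr_get_d (m, GMP_RNDN), inexact);
  mpfr_clear (m);
}
#endif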
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long, so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, reduce the quo value modulo the largest
                 number that the target int can hold, leaving one bit
                 for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo = fold_build2 (MODIFY_EXPR,
                                                 TREE_TYPE (arg_quo), arg_quo,
                                                 build_int_cst (NULL, integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
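/* A rough standalone sketch of the mpfr_remquo() call above (assuming
   MPFR >= 2.3.0, an IEEE binary64 target so 53 bits of precision, and
   host doubles in place of the REAL_CSTs; the helper name is made up
   for illustration).  Note how the remainder and the low-order
   quotient bits come back from a single MPFR call:  */
#if 0
#include <stdio.h>
#include <mpfr.h>

static void
remquo_fold_sketch (double x, double y)
{
  mpfr_t m0, m1;
  long quo;

  mpfr_inits2 (53, m0, m1, NULL);
  mpfr_set_d (m0, x, GMP_RNDN);
  mpfr_set_d (m1, y, GMP_RNDN);
  mpfr_clear_flags ();
  mpfr_remquo (m0, &quo, m0, m1, GMP_RNDN);  /* remainder in place, quo by pointer */
  printf ("remquo (%g, %g): rem = %.17g, quo = %ld\n",
          x, y, mpfr_get_d (m0, GMP_RNDN), quo);
  mpfr_clears (m0, m1, NULL);
}
#endif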
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (NULL, sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
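/* A rough standalone sketch of the mpfr_lgamma() call above (assuming
   MPFR >= 2.3.0, an IEEE binary64 target so 53 bits of precision, and
   a host double in place of the REAL_CST; the helper name is made up
   for illustration).  SG receives the sign of Gamma(x), which is what
   lgamma_r() stores through its signgam pointer:  */
#if 0
#include <stdio.h>
#include <mpfr.h>

static void
lgamma_r_fold_sketch (double x)
{
  mpfr_t m;
  int sg, inexact;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, x, GMP_RNDN);
  mpfr_clear_flags ();
  inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
  printf ("lgamma_r (%g) = %.17g, signgam = %d (inexact = %d)\n",
          x, mpfr_get_d (m, GMP_RNDN), sg, inexact);
  mpfr_clear (m);
}
#endif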
#endif /* MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0) */