[dragonfly.git] / contrib / gcc-4.4 / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
62 /* Define the names of the builtin function types and codes. */
63 const char *const built_in_class_names[4]
64 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
67 const char * built_in_names[(int) END_BUILTINS] =
68 {
69 #include "builtins.def"
70 };
71 #undef DEF_BUILTIN
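/* Illustrative sketch (not part of the original source): builtins.def is an
   X-macro file, so with the DEF_BUILTIN definition above each entry expands
   to the stringized enum name, filling the array with literals such as

     "BUILT_IN_ALLOCA", "BUILT_IN_MEMCPY", ...

   The exact names and their order come from builtins.def.  */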
73 /* Set up an array of _DECL trees; make sure each element is
74 initialized to NULL_TREE. */
75 tree built_in_decls[(int) END_BUILTINS];
76 /* Declarations used when constructing the builtin implicitly in the compiler.
77 An entry may be NULL_TREE when this is invalid (for instance, the runtime is
78 not required to implement the function call in all cases). */
79 tree implicit_built_in_decls[(int) END_BUILTINS];
81 static const char *c_getstr (tree);
82 static rtx c_readstr (const char *, enum machine_mode);
83 static int target_char_cast (tree, char *);
84 static rtx get_memory_rtx (tree, tree);
85 static int apply_args_size (void);
86 static int apply_result_size (void);
87 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
88 static rtx result_vector (int, rtx);
89 #endif
90 static void expand_builtin_update_setjmp_buf (rtx);
91 static void expand_builtin_prefetch (tree);
92 static rtx expand_builtin_apply_args (void);
93 static rtx expand_builtin_apply_args_1 (void);
94 static rtx expand_builtin_apply (rtx, rtx, rtx);
95 static void expand_builtin_return (rtx);
96 static enum type_class type_to_class (tree);
97 static rtx expand_builtin_classify_type (tree);
98 static void expand_errno_check (tree, rtx);
99 static rtx expand_builtin_mathfn (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_args_info (tree);
108 static rtx expand_builtin_next_arg (void);
109 static rtx expand_builtin_va_start (tree);
110 static rtx expand_builtin_va_end (tree);
111 static rtx expand_builtin_va_copy (tree);
112 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
124 enum machine_mode, int);
125 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
126 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_bcopy (tree, int);
129 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
130 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
131 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
133 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
134 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
136 static rtx expand_builtin_bzero (tree);
137 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
142 static rtx expand_builtin_alloca (tree, rtx);
143 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
144 static rtx expand_builtin_frame_address (tree, tree);
145 static rtx expand_builtin_fputs (tree, rtx, bool);
146 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
147 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
148 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
149 static tree stabilize_va_list (tree, int);
150 static rtx expand_builtin_expect (tree, rtx);
151 static tree fold_builtin_constant_p (tree);
152 static tree fold_builtin_expect (tree, tree);
153 static tree fold_builtin_classify_type (tree);
154 static tree fold_builtin_strlen (tree);
155 static tree fold_builtin_inf (tree, int);
156 static tree fold_builtin_nan (tree, tree, int);
157 static tree rewrite_call_expr (tree, int, tree, int, ...);
158 static bool validate_arg (const_tree, enum tree_code code);
159 static bool integer_valued_real_p (tree);
160 static tree fold_trunc_transparent_mathfn (tree, tree);
161 static bool readonly_data_expr (tree);
162 static rtx expand_builtin_fabs (tree, rtx, rtx);
163 static rtx expand_builtin_signbit (tree, rtx);
164 static tree fold_builtin_sqrt (tree, tree);
165 static tree fold_builtin_cbrt (tree, tree);
166 static tree fold_builtin_pow (tree, tree, tree, tree);
167 static tree fold_builtin_powi (tree, tree, tree, tree);
168 static tree fold_builtin_cos (tree, tree, tree);
169 static tree fold_builtin_cosh (tree, tree, tree);
170 static tree fold_builtin_tan (tree, tree);
171 static tree fold_builtin_trunc (tree, tree);
172 static tree fold_builtin_floor (tree, tree);
173 static tree fold_builtin_ceil (tree, tree);
174 static tree fold_builtin_round (tree, tree);
175 static tree fold_builtin_int_roundingfn (tree, tree);
176 static tree fold_builtin_bitop (tree, tree);
177 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
178 static tree fold_builtin_strchr (tree, tree, tree);
179 static tree fold_builtin_memchr (tree, tree, tree, tree);
180 static tree fold_builtin_memcmp (tree, tree, tree);
181 static tree fold_builtin_strcmp (tree, tree);
182 static tree fold_builtin_strncmp (tree, tree, tree);
183 static tree fold_builtin_signbit (tree, tree);
184 static tree fold_builtin_copysign (tree, tree, tree, tree);
185 static tree fold_builtin_isascii (tree);
186 static tree fold_builtin_toascii (tree);
187 static tree fold_builtin_isdigit (tree);
188 static tree fold_builtin_fabs (tree, tree);
189 static tree fold_builtin_abs (tree, tree);
190 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
191 enum tree_code);
192 static tree fold_builtin_n (tree, tree *, int, bool);
193 static tree fold_builtin_0 (tree, bool);
194 static tree fold_builtin_1 (tree, tree, bool);
195 static tree fold_builtin_2 (tree, tree, tree, bool);
196 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
197 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
198 static tree fold_builtin_varargs (tree, tree, bool);
200 static tree fold_builtin_strpbrk (tree, tree, tree);
201 static tree fold_builtin_strstr (tree, tree, tree);
202 static tree fold_builtin_strrchr (tree, tree, tree);
203 static tree fold_builtin_strcat (tree, tree);
204 static tree fold_builtin_strncat (tree, tree, tree);
205 static tree fold_builtin_strspn (tree, tree);
206 static tree fold_builtin_strcspn (tree, tree);
207 static tree fold_builtin_sprintf (tree, tree, tree, int);
209 static rtx expand_builtin_object_size (tree);
210 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
211 enum built_in_function);
212 static void maybe_emit_chk_warning (tree, enum built_in_function);
213 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
214 static void maybe_emit_free_warning (tree);
215 static tree fold_builtin_object_size (tree, tree);
216 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
217 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
218 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
219 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
220 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
221 enum built_in_function);
222 static bool init_target_chars (void);
224 static unsigned HOST_WIDE_INT target_newline;
225 static unsigned HOST_WIDE_INT target_percent;
226 static unsigned HOST_WIDE_INT target_c;
227 static unsigned HOST_WIDE_INT target_s;
228 static char target_percent_c[3];
229 static char target_percent_s[3];
230 static char target_percent_s_newline[4];
231 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
232 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
233 static tree do_mpfr_arg2 (tree, tree, tree,
234 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
235 static tree do_mpfr_arg3 (tree, tree, tree, tree,
236 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
237 static tree do_mpfr_sincos (tree, tree, tree);
238 static tree do_mpfr_bessel_n (tree, tree, tree,
239 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
240 const REAL_VALUE_TYPE *, bool);
241 static tree do_mpfr_remquo (tree, tree, tree);
242 static tree do_mpfr_lgamma_r (tree, tree, tree);
244 /* Return true if NODE should be considered for inline expansion regardless
245 of the optimization level. This is the case whenever a function is invoked
246 with its "internal" name, which normally contains the prefix "__builtin". */
248 static bool called_as_built_in (tree node)
250 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
251 if (strncmp (name, "__builtin_", 10) == 0)
252 return true;
253 if (strncmp (name, "__sync_", 7) == 0)
254 return true;
255 return false;
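/* For illustration (assumed examples, not from this file): a call written as
   __builtin_memcpy (...) or __sync_fetch_and_add (...) matches the prefixes
   checked above and is considered for expansion regardless of the
   optimization level, whereas a plain memcpy (...) call is not.  */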
258 /* Return the alignment in bits of EXP, an object.
259 Don't return more than MAX_ALIGN no matter what; ALIGN is the initial
260 guessed alignment, e.g. from type alignment. */
263 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
265 unsigned int inner;
267 inner = max_align;
268 if (handled_component_p (exp))
270 HOST_WIDE_INT bitsize, bitpos;
271 tree offset;
272 enum machine_mode mode;
273 int unsignedp, volatilep;
275 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
276 &mode, &unsignedp, &volatilep, true);
277 if (bitpos)
278 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
279 while (offset)
281 tree next_offset;
283 if (TREE_CODE (offset) == PLUS_EXPR)
285 next_offset = TREE_OPERAND (offset, 0);
286 offset = TREE_OPERAND (offset, 1);
288 else
289 next_offset = NULL;
290 if (host_integerp (offset, 1))
292 /* Any overflow in calculating offset_bits won't change
293 the alignment. */
294 unsigned offset_bits
295 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
297 if (offset_bits)
298 inner = MIN (inner, (offset_bits & -offset_bits));
300 else if (TREE_CODE (offset) == MULT_EXPR
301 && host_integerp (TREE_OPERAND (offset, 1), 1))
303 /* Any overflow in calculating offset_factor won't change
304 the alignment. */
305 unsigned offset_factor
306 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
307 * BITS_PER_UNIT);
309 if (offset_factor)
310 inner = MIN (inner, (offset_factor & -offset_factor));
312 else
314 inner = MIN (inner, BITS_PER_UNIT);
315 break;
317 offset = next_offset;
320 if (DECL_P (exp))
321 align = MIN (inner, DECL_ALIGN (exp));
322 #ifdef CONSTANT_ALIGNMENT
323 else if (CONSTANT_CLASS_P (exp))
324 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
325 #endif
326 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
327 || TREE_CODE (exp) == INDIRECT_REF)
328 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
329 else
330 align = MIN (align, inner);
331 return MIN (align, max_align);
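/* Worked example (assumed values, not from this file): for a component
   reference whose field starts at bit position 24 inside an object aligned
   to 64 bits, bitpos & -bitpos is 8, so INNER is capped at 8 bits and the
   access is only known to be byte-aligned.  */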
334 /* Return the alignment in bits of EXP, a pointer valued expression.
335 But don't return more than MAX_ALIGN no matter what.
336 The alignment returned is, by default, the alignment of the thing that
337 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
339 Otherwise, look at the expression to see if we can do better, i.e., if the
340 expression is actually pointing at an object whose alignment is tighter. */
343 get_pointer_alignment (tree exp, unsigned int max_align)
345 unsigned int align, inner;
347 /* We rely on TER to compute accurate alignment information. */
348 if (!(optimize && flag_tree_ter))
349 return 0;
351 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
352 return 0;
354 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
355 align = MIN (align, max_align);
357 while (1)
359 switch (TREE_CODE (exp))
361 CASE_CONVERT:
362 exp = TREE_OPERAND (exp, 0);
363 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
364 return align;
366 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
367 align = MIN (inner, max_align);
368 break;
370 case POINTER_PLUS_EXPR:
371 /* If sum of pointer + int, restrict our maximum alignment to that
372 imposed by the integer. If not, we can't do any better than
373 ALIGN. */
374 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
375 return align;
377 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
378 & (max_align / BITS_PER_UNIT - 1))
379 != 0)
380 max_align >>= 1;
382 exp = TREE_OPERAND (exp, 0);
383 break;
385 case ADDR_EXPR:
386 /* See what we are pointing at and look at its alignment. */
387 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
389 default:
390 return align;
395 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
396 way to do it, because the string could contain a zero byte in the middle;
397 TREE_STRING_LENGTH is the size of the character array, not the string.
399 ONLY_VALUE should be nonzero if the result is not going to be emitted
400 into the instruction stream and zero if it is going to be expanded.
401 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
402 is returned, otherwise NULL, since
403 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
404 evaluate the side-effects.
406 The value returned is of type `ssizetype'.
408 Unfortunately, string_constant can't access the values of const char
409 arrays with initializers, so neither can we do so here. */
411 tree
412 c_strlen (tree src, int only_value)
414 tree offset_node;
415 HOST_WIDE_INT offset;
416 int max;
417 const char *ptr;
419 STRIP_NOPS (src);
420 if (TREE_CODE (src) == COND_EXPR
421 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
423 tree len1, len2;
425 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
426 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
427 if (tree_int_cst_equal (len1, len2))
428 return len1;
431 if (TREE_CODE (src) == COMPOUND_EXPR
432 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
433 return c_strlen (TREE_OPERAND (src, 1), only_value);
435 src = string_constant (src, &offset_node);
436 if (src == 0)
437 return NULL_TREE;
439 max = TREE_STRING_LENGTH (src) - 1;
440 ptr = TREE_STRING_POINTER (src);
442 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
444 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
445 compute the offset to the following null if we don't know where to
446 start searching for it. */
447 int i;
449 for (i = 0; i < max; i++)
450 if (ptr[i] == 0)
451 return NULL_TREE;
453 /* We don't know the starting offset, but we do know that the string
454 has no internal zero bytes. We can assume that the offset falls
455 within the bounds of the string; otherwise, the programmer deserves
456 what he gets. Subtract the offset from the length of the string,
457 and return that. This would perhaps not be valid if we were dealing
458 with named arrays in addition to literal string constants. */
460 return size_diffop (size_int (max), offset_node);
463 /* We have a known offset into the string. Start searching there for
464 a null character if we can represent it as a single HOST_WIDE_INT. */
465 if (offset_node == 0)
466 offset = 0;
467 else if (! host_integerp (offset_node, 0))
468 offset = -1;
469 else
470 offset = tree_low_cst (offset_node, 0);
472 /* If the offset is known to be out of bounds, warn, and call strlen at
473 runtime. */
474 if (offset < 0 || offset > max)
476 /* Suppress multiple warnings for propagated constant strings. */
477 if (! TREE_NO_WARNING (src))
479 warning (0, "offset outside bounds of constant string");
480 TREE_NO_WARNING (src) = 1;
482 return NULL_TREE;
485 /* Use strlen to search for the first zero byte. Since any strings
486 constructed with build_string will have nulls appended, we win even
487 if we get handed something like (char[4])"abcd".
489 Since OFFSET is our starting index into the string, no further
490 calculation is needed. */
491 return ssize_int (strlen (ptr + offset));
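/* Worked examples (assumed inputs, not from this file): for the constant
   "hello" c_strlen returns ssize_int (5); for "foo\0bar" with a known
   offset of 4 it returns ssize_int (3); for "foo\0bar" with a non-constant
   offset it returns NULL_TREE, because the internal zero byte makes the
   length impossible to determine without knowing where to start.  */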
494 /* Return a char pointer for a C string if it is a string constant
495 or sum of string constant and integer constant. */
497 static const char *
498 c_getstr (tree src)
500 tree offset_node;
502 src = string_constant (src, &offset_node);
503 if (src == 0)
504 return 0;
506 if (offset_node == 0)
507 return TREE_STRING_POINTER (src);
508 else if (!host_integerp (offset_node, 1)
509 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
510 return 0;
512 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
515 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
516 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
518 static rtx
519 c_readstr (const char *str, enum machine_mode mode)
521 HOST_WIDE_INT c[2];
522 HOST_WIDE_INT ch;
523 unsigned int i, j;
525 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
527 c[0] = 0;
528 c[1] = 0;
529 ch = 1;
530 for (i = 0; i < GET_MODE_SIZE (mode); i++)
532 j = i;
533 if (WORDS_BIG_ENDIAN)
534 j = GET_MODE_SIZE (mode) - i - 1;
535 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
536 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
537 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
538 j *= BITS_PER_UNIT;
539 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
541 if (ch)
542 ch = (unsigned char) str[i];
543 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
545 return immed_double_const (c[0], c[1], mode);
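/* Worked example (assumed target properties, not from this file): on a
   little-endian target, c_readstr ("abcd", SImode) packs the bytes into a
   CONST_INT with value 0x64636261; on a big-endian target the same call
   yields 0x61626364.  */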
548 /* Cast a target constant CST to target CHAR, and if that value fits into
549 the host char type, return zero and put the value into the variable
550 pointed to by P. */
552 static int
553 target_char_cast (tree cst, char *p)
555 unsigned HOST_WIDE_INT val, hostval;
557 if (!host_integerp (cst, 1)
558 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
559 return 1;
561 val = tree_low_cst (cst, 1);
562 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
563 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
565 hostval = val;
566 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
567 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
569 if (val != hostval)
570 return 1;
572 *p = hostval;
573 return 0;
576 /* Similar to save_expr, but assumes that arbitrary code is not executed
577 in between the multiple evaluations. In particular, we assume that a
578 non-addressable local variable will not be modified. */
580 static tree
581 builtin_save_expr (tree exp)
583 if (TREE_ADDRESSABLE (exp) == 0
584 && (TREE_CODE (exp) == PARM_DECL
585 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
586 return exp;
588 return save_expr (exp);
591 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
592 times to get the address of either a higher stack frame, or a return
593 address located within it (depending on FNDECL_CODE). */
595 static rtx
596 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
598 int i;
600 #ifdef INITIAL_FRAME_ADDRESS_RTX
601 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
602 #else
603 rtx tem;
605 /* For a zero count with __builtin_return_address, we don't care what
606 frame address we return, because target-specific definitions will
607 override us. Therefore frame pointer elimination is OK, and using
608 the soft frame pointer is OK.
610 For a nonzero count, or a zero count with __builtin_frame_address,
611 we require a stable offset from the current frame pointer to the
612 previous one, so we must use the hard frame pointer, and
613 we must disable frame pointer elimination. */
614 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
615 tem = frame_pointer_rtx;
616 else
618 tem = hard_frame_pointer_rtx;
620 /* Tell reload not to eliminate the frame pointer. */
621 crtl->accesses_prior_frames = 1;
623 #endif
625 /* Some machines need special handling before we can access
626 arbitrary frames. For example, on the SPARC, we must first flush
627 all register windows to the stack. */
628 #ifdef SETUP_FRAME_ADDRESSES
629 if (count > 0)
630 SETUP_FRAME_ADDRESSES ();
631 #endif
633 /* On the SPARC, the return address is not in the frame, it is in a
634 register. There is no way to access it off of the current frame
635 pointer, but it can be accessed off the previous frame pointer by
636 reading the value from the register window save area. */
637 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
638 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
639 count--;
640 #endif
642 /* Scan back COUNT frames to the specified frame. */
643 for (i = 0; i < count; i++)
645 /* Assume the dynamic chain pointer is in the word that the
646 frame address points to, unless otherwise specified. */
647 #ifdef DYNAMIC_CHAIN_ADDRESS
648 tem = DYNAMIC_CHAIN_ADDRESS (tem);
649 #endif
650 tem = memory_address (Pmode, tem);
651 tem = gen_frame_mem (Pmode, tem);
652 tem = copy_to_reg (tem);
655 /* For __builtin_frame_address, return what we've got. But, on
656 the SPARC for example, we may have to add a bias. */
657 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
658 #ifdef FRAME_ADDR_RTX
659 return FRAME_ADDR_RTX (tem);
660 #else
661 return tem;
662 #endif
664 /* For __builtin_return_address, get the return address from that frame. */
665 #ifdef RETURN_ADDR_RTX
666 tem = RETURN_ADDR_RTX (count, tem);
667 #else
668 tem = memory_address (Pmode,
669 plus_constant (tem, GET_MODE_SIZE (Pmode)));
670 tem = gen_frame_mem (Pmode, tem);
671 #endif
672 return tem;
675 /* Alias set used for setjmp buffer. */
676 static alias_set_type setjmp_alias_set = -1;
678 /* Construct the leading half of a __builtin_setjmp call. Control will
679 return to RECEIVER_LABEL. This is also called directly by the SJLJ
680 exception handling code. */
682 void
683 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
685 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
686 rtx stack_save;
687 rtx mem;
689 if (setjmp_alias_set == -1)
690 setjmp_alias_set = new_alias_set ();
692 buf_addr = convert_memory_address (Pmode, buf_addr);
694 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
696 /* We store the frame pointer and the address of receiver_label in
697 the buffer and use the rest of it for the stack save area, which
698 is machine-dependent. */
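/* Sketch of the resulting buffer layout (offsets in multiples of
   GET_MODE_SIZE (Pmode); everything past the second word is the
   machine-dependent stack save area):
     word 0   frame pointer value (targetm.builtin_setjmp_frame_value ())
     word 1   address of RECEIVER_LABEL
     word 2.. stack save area in SA_MODE  */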
700 mem = gen_rtx_MEM (Pmode, buf_addr);
701 set_mem_alias_set (mem, setjmp_alias_set);
702 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
704 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
705 set_mem_alias_set (mem, setjmp_alias_set);
707 emit_move_insn (validize_mem (mem),
708 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
710 stack_save = gen_rtx_MEM (sa_mode,
711 plus_constant (buf_addr,
712 2 * GET_MODE_SIZE (Pmode)));
713 set_mem_alias_set (stack_save, setjmp_alias_set);
714 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
716 /* If there is further processing to do, do it. */
717 #ifdef HAVE_builtin_setjmp_setup
718 if (HAVE_builtin_setjmp_setup)
719 emit_insn (gen_builtin_setjmp_setup (buf_addr));
720 #endif
722 /* Tell optimize_save_area_alloca that extra work will need to be
723 done during alloca. */
724 cfun->calls_setjmp = 1;
726 /* We have a nonlocal label. */
727 cfun->has_nonlocal_label = 1;
730 /* Construct the trailing part of a __builtin_setjmp call. This is
731 also called directly by the SJLJ exception handling code. */
733 void
734 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
736 /* Clobber the FP when we get here, so we have to make sure it's
737 marked as used by this function. */
738 emit_use (hard_frame_pointer_rtx);
740 /* Mark the static chain as clobbered here so life information
741 doesn't get messed up for it. */
742 emit_clobber (static_chain_rtx);
744 /* Now put in the code to restore the frame pointer, and argument
745 pointer, if needed. */
746 #ifdef HAVE_nonlocal_goto
747 if (! HAVE_nonlocal_goto)
748 #endif
750 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
751 /* This might change the hard frame pointer in ways that aren't
752 apparent to early optimization passes, so force a clobber. */
753 emit_clobber (hard_frame_pointer_rtx);
756 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
757 if (fixed_regs[ARG_POINTER_REGNUM])
759 #ifdef ELIMINABLE_REGS
760 size_t i;
761 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
763 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
764 if (elim_regs[i].from == ARG_POINTER_REGNUM
765 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
766 break;
768 if (i == ARRAY_SIZE (elim_regs))
769 #endif
771 /* Now restore our arg pointer from the address at which it
772 was saved in our stack frame. */
773 emit_move_insn (crtl->args.internal_arg_pointer,
774 copy_to_reg (get_arg_pointer_save_area ()));
777 #endif
779 #ifdef HAVE_builtin_setjmp_receiver
780 if (HAVE_builtin_setjmp_receiver)
781 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
782 else
783 #endif
784 #ifdef HAVE_nonlocal_goto_receiver
785 if (HAVE_nonlocal_goto_receiver)
786 emit_insn (gen_nonlocal_goto_receiver ());
787 else
788 #endif
789 { /* Nothing */ }
791 /* We must not allow the code we just generated to be reordered by
792 scheduling. Specifically, the update of the frame pointer must
793 happen immediately, not later. */
794 emit_insn (gen_blockage ());
797 /* __builtin_longjmp is passed a pointer to an array of five words (not
798 all will be used on all machines). It operates similarly to the C
799 library function of the same name, but is more efficient. Much of
800 the code below is copied from the handling of non-local gotos. */
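/* Illustrative user-level pairing (assumed example, not from this file):

     void *buf[5];

     void f (void)
     {
       if (__builtin_setjmp (buf) == 0)
         do_work ();          // normal path; may eventually call g ()
       else
         handle_resume ();    // reached when g () longjmps back here
     }

     void g (void)
     {
       __builtin_longjmp (buf, 1);   // second argument must be the constant 1
     }

   do_work, handle_resume, f and g are hypothetical; these builtins are meant
   for internal exception-handling use only.  */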
802 static void
803 expand_builtin_longjmp (rtx buf_addr, rtx value)
805 rtx fp, lab, stack, insn, last;
806 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
808 /* DRAP is needed for stack realign if longjmp is expanded to current
809 function */
810 if (SUPPORTS_STACK_ALIGNMENT)
811 crtl->need_drap = true;
813 if (setjmp_alias_set == -1)
814 setjmp_alias_set = new_alias_set ();
816 buf_addr = convert_memory_address (Pmode, buf_addr);
818 buf_addr = force_reg (Pmode, buf_addr);
820 /* We used to store value in static_chain_rtx, but that fails if pointers
821 are smaller than integers. We instead require that the user must pass
822 a second argument of 1, because that is what builtin_setjmp will
823 return. This also makes EH slightly more efficient, since we are no
824 longer copying around a value that we don't care about. */
825 gcc_assert (value == const1_rtx);
827 last = get_last_insn ();
828 #ifdef HAVE_builtin_longjmp
829 if (HAVE_builtin_longjmp)
830 emit_insn (gen_builtin_longjmp (buf_addr));
831 else
832 #endif
834 fp = gen_rtx_MEM (Pmode, buf_addr);
835 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
836 GET_MODE_SIZE (Pmode)));
838 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
839 2 * GET_MODE_SIZE (Pmode)));
840 set_mem_alias_set (fp, setjmp_alias_set);
841 set_mem_alias_set (lab, setjmp_alias_set);
842 set_mem_alias_set (stack, setjmp_alias_set);
844 /* Pick up FP, label, and SP from the block and jump. This code is
845 from expand_goto in stmt.c; see there for detailed comments. */
846 #ifdef HAVE_nonlocal_goto
847 if (HAVE_nonlocal_goto)
848 /* We have to pass a value to the nonlocal_goto pattern that will
849 get copied into the static_chain pointer, but it does not matter
850 what that value is, because builtin_setjmp does not use it. */
851 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
852 else
853 #endif
855 lab = copy_to_reg (lab);
857 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
858 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
860 emit_move_insn (hard_frame_pointer_rtx, fp);
861 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
863 emit_use (hard_frame_pointer_rtx);
864 emit_use (stack_pointer_rtx);
865 emit_indirect_jump (lab);
869 /* Search backwards and mark the jump insn as a non-local goto.
870 Note that this precludes the use of __builtin_longjmp to a
871 __builtin_setjmp target in the same function. However, we've
872 already cautioned the user that these functions are for
873 internal exception handling use only. */
874 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
876 gcc_assert (insn != last);
878 if (JUMP_P (insn))
880 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
881 break;
883 else if (CALL_P (insn))
884 break;
888 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
889 and the address of the save area. */
891 static rtx
892 expand_builtin_nonlocal_goto (tree exp)
894 tree t_label, t_save_area;
895 rtx r_label, r_save_area, r_fp, r_sp, insn;
897 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
898 return NULL_RTX;
900 t_label = CALL_EXPR_ARG (exp, 0);
901 t_save_area = CALL_EXPR_ARG (exp, 1);
903 r_label = expand_normal (t_label);
904 r_label = convert_memory_address (Pmode, r_label);
905 r_save_area = expand_normal (t_save_area);
906 r_save_area = convert_memory_address (Pmode, r_save_area);
907 /* Copy the address of the save location to a register just in case it was based
908 on the frame pointer. */
909 r_save_area = copy_to_reg (r_save_area);
910 r_fp = gen_rtx_MEM (Pmode, r_save_area);
911 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
912 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
914 crtl->has_nonlocal_goto = 1;
916 #ifdef HAVE_nonlocal_goto
917 /* ??? We no longer need to pass the static chain value, afaik. */
918 if (HAVE_nonlocal_goto)
919 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
920 else
921 #endif
923 r_label = copy_to_reg (r_label);
925 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
926 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
928 /* Restore frame pointer for containing function.
929 This sets the actual hard register used for the frame pointer
930 to the location of the function's incoming static chain info.
931 The non-local goto handler will then adjust it to contain the
932 proper value and reload the argument pointer, if needed. */
933 emit_move_insn (hard_frame_pointer_rtx, r_fp);
934 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
936 /* USE of hard_frame_pointer_rtx added for consistency;
937 not clear if really needed. */
938 emit_use (hard_frame_pointer_rtx);
939 emit_use (stack_pointer_rtx);
941 /* If the architecture is using a GP register, we must
942 conservatively assume that the target function makes use of it.
943 The prologue of functions with nonlocal gotos must therefore
944 initialize the GP register to the appropriate value, and we
945 must then make sure that this value is live at the point
946 of the jump. (Note that this doesn't necessarily apply
947 to targets with a nonlocal_goto pattern; they are free
948 to implement it in their own way. Note also that this is
949 a no-op if the GP register is a global invariant.) */
950 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
951 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
952 emit_use (pic_offset_table_rtx);
954 emit_indirect_jump (r_label);
957 /* Search backwards to the jump insn and mark it as a
958 non-local goto. */
959 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
961 if (JUMP_P (insn))
963 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
964 break;
966 else if (CALL_P (insn))
967 break;
970 return const0_rtx;
973 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
974 (not all will be used on all machines) that was passed to __builtin_setjmp.
975 It updates the stack pointer in that block to correspond to the current
976 stack pointer. */
978 static void
979 expand_builtin_update_setjmp_buf (rtx buf_addr)
981 enum machine_mode sa_mode = Pmode;
982 rtx stack_save;
985 #ifdef HAVE_save_stack_nonlocal
986 if (HAVE_save_stack_nonlocal)
987 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
988 #endif
989 #ifdef STACK_SAVEAREA_MODE
990 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
991 #endif
993 stack_save
994 = gen_rtx_MEM (sa_mode,
995 memory_address
996 (sa_mode,
997 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
999 #ifdef HAVE_setjmp
1000 if (HAVE_setjmp)
1001 emit_insn (gen_setjmp ());
1002 #endif
1004 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1007 /* Expand a call to __builtin_prefetch. For a target that does not support
1008 data prefetch, evaluate the memory address argument in case it has side
1009 effects. */
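/* Illustrative user-level call (assumed example, not from this file):

     __builtin_prefetch (&a[i + 8], 0, 3);

   prefetches for a read (second argument 0, the default) with maximum
   temporal locality (third argument 3, the default); both of those
   arguments must be compile-time constants, as enforced below.  */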
1011 static void
1012 expand_builtin_prefetch (tree exp)
1014 tree arg0, arg1, arg2;
1015 int nargs;
1016 rtx op0, op1, op2;
1018 if (!validate_arglist (exp, POINTER_TYPE, 0))
1019 return;
1021 arg0 = CALL_EXPR_ARG (exp, 0);
1023 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1024 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1025 locality). */
1026 nargs = call_expr_nargs (exp);
1027 if (nargs > 1)
1028 arg1 = CALL_EXPR_ARG (exp, 1);
1029 else
1030 arg1 = integer_zero_node;
1031 if (nargs > 2)
1032 arg2 = CALL_EXPR_ARG (exp, 2);
1033 else
1034 arg2 = build_int_cst (NULL_TREE, 3);
1036 /* Argument 0 is an address. */
1037 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1039 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1040 if (TREE_CODE (arg1) != INTEGER_CST)
1042 error ("second argument to %<__builtin_prefetch%> must be a constant");
1043 arg1 = integer_zero_node;
1045 op1 = expand_normal (arg1);
1046 /* Argument 1 must be either zero or one. */
1047 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1049 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1050 " using zero");
1051 op1 = const0_rtx;
1054 /* Argument 2 (locality) must be a compile-time constant int. */
1055 if (TREE_CODE (arg2) != INTEGER_CST)
1057 error ("third argument to %<__builtin_prefetch%> must be a constant");
1058 arg2 = integer_zero_node;
1060 op2 = expand_normal (arg2);
1061 /* Argument 2 must be 0, 1, 2, or 3. */
1062 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1064 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1065 op2 = const0_rtx;
1068 #ifdef HAVE_prefetch
1069 if (HAVE_prefetch)
1071 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1072 (op0,
1073 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1074 || (GET_MODE (op0) != Pmode))
1076 op0 = convert_memory_address (Pmode, op0);
1077 op0 = force_reg (Pmode, op0);
1079 emit_insn (gen_prefetch (op0, op1, op2));
1081 #endif
1083 /* Don't do anything with direct references to volatile memory, but
1084 generate code to handle other side effects. */
1085 if (!MEM_P (op0) && side_effects_p (op0))
1086 emit_insn (op0);
1089 /* Get a MEM rtx for expression EXP which is the address of an operand
1090 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1091 the maximum length of the block of memory that might be accessed or
1092 NULL if unknown. */
1094 static rtx
1095 get_memory_rtx (tree exp, tree len)
1097 tree orig_exp = exp;
1098 rtx addr, mem;
1099 HOST_WIDE_INT off;
1101 /* When EXP is a not-yet-resolved SAVE_EXPR, MEM_ATTRS can still be derived
1102 from its expression; e.g. for expr->a.b only <variable>.a.b is recorded. */
1103 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1104 exp = TREE_OPERAND (exp, 0);
1106 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1107 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1109 /* Get an expression we can use to find the attributes to assign to MEM.
1110 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1111 we can. First remove any nops. */
1112 while (CONVERT_EXPR_P (exp)
1113 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1114 exp = TREE_OPERAND (exp, 0);
1116 off = 0;
1117 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1118 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1119 && host_integerp (TREE_OPERAND (exp, 1), 0)
1120 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1121 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1122 else if (TREE_CODE (exp) == ADDR_EXPR)
1123 exp = TREE_OPERAND (exp, 0);
1124 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1125 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1126 else
1127 exp = NULL;
1129 /* Honor attributes derived from exp, except for the alias set
1130 (as builtin stringops may alias with anything) and the size
1131 (as stringops may access multiple array elements). */
1132 if (exp)
1134 set_mem_attributes (mem, exp, 0);
1136 if (off)
1137 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1139 /* Allow the string and memory builtins to overflow from one
1140 field into another, see http://gcc.gnu.org/PR23561.
1141 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1142 memory accessed by the string or memory builtin will fit
1143 within the field. */
1144 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1146 tree mem_expr = MEM_EXPR (mem);
1147 HOST_WIDE_INT offset = -1, length = -1;
1148 tree inner = exp;
1150 while (TREE_CODE (inner) == ARRAY_REF
1151 || CONVERT_EXPR_P (inner)
1152 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1153 || TREE_CODE (inner) == SAVE_EXPR)
1154 inner = TREE_OPERAND (inner, 0);
1156 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1158 if (MEM_OFFSET (mem)
1159 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1160 offset = INTVAL (MEM_OFFSET (mem));
1162 if (offset >= 0 && len && host_integerp (len, 0))
1163 length = tree_low_cst (len, 0);
1165 while (TREE_CODE (inner) == COMPONENT_REF)
1167 tree field = TREE_OPERAND (inner, 1);
1168 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1169 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1171 /* Bitfields are generally not byte-addressable. */
1172 gcc_assert (!DECL_BIT_FIELD (field)
1173 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1174 % BITS_PER_UNIT) == 0
1175 && host_integerp (DECL_SIZE (field), 0)
1176 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1177 % BITS_PER_UNIT) == 0));
1179 /* If we can prove that the memory starting at XEXP (mem, 0) and
1180 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1181 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1182 fields without DECL_SIZE_UNIT like flexible array members. */
1183 if (length >= 0
1184 && DECL_SIZE_UNIT (field)
1185 && host_integerp (DECL_SIZE_UNIT (field), 0))
1187 HOST_WIDE_INT size
1188 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1189 if (offset <= size
1190 && length <= size
1191 && offset + length <= size)
1192 break;
1195 if (offset >= 0
1196 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1197 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1198 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1199 / BITS_PER_UNIT;
1200 else
1202 offset = -1;
1203 length = -1;
1206 mem_expr = TREE_OPERAND (mem_expr, 0);
1207 inner = TREE_OPERAND (inner, 0);
1210 if (mem_expr == NULL)
1211 offset = -1;
1212 if (mem_expr != MEM_EXPR (mem))
1214 set_mem_expr (mem, mem_expr);
1215 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1218 set_mem_alias_set (mem, 0);
1219 set_mem_size (mem, NULL_RTX);
1222 return mem;
1225 /* Built-in functions to perform an untyped call and return. */
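/* Illustrative user-level use of the untyped call builtins (assumed example,
   not from this file): a wrapper that forwards its own arguments to TARGET
   and returns whatever TARGET returned:

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*) ()) target, args, 64);
     __builtin_return (result);

   Here target is a hypothetical function pointer and 64 an assumed upper
   bound on the size of the pushed argument data.  */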
1227 /* For each register that may be used for calling a function, this
1228 gives a mode used to copy the register's value. VOIDmode indicates
1229 the register is not used for calling a function. If the machine
1230 has register windows, this gives only the outbound registers.
1231 INCOMING_REGNO gives the corresponding inbound register. */
1232 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1234 /* For each register that may be used for returning values, this gives
1235 a mode used to copy the register's value. VOIDmode indicates the
1236 register is not used for returning values. If the machine has
1237 register windows, this gives only the outbound registers.
1238 INCOMING_REGNO gives the corresponding inbound register. */
1239 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1241 /* For each register that may be used for calling a function, this
1242 gives the offset of that register into the block returned by
1243 __builtin_apply_args. 0 indicates that the register is not
1244 used for calling a function. */
1245 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1247 /* Return the size required for the block returned by __builtin_apply_args,
1248 and initialize apply_args_mode. */
1250 static int
1251 apply_args_size (void)
1253 static int size = -1;
1254 int align;
1255 unsigned int regno;
1256 enum machine_mode mode;
1258 /* The values computed by this function never change. */
1259 if (size < 0)
1261 /* The first value is the incoming arg-pointer. */
1262 size = GET_MODE_SIZE (Pmode);
1264 /* The second value is the structure value address unless this is
1265 passed as an "invisible" first argument. */
1266 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1267 size += GET_MODE_SIZE (Pmode);
1269 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1270 if (FUNCTION_ARG_REGNO_P (regno))
1272 mode = reg_raw_mode[regno];
1274 gcc_assert (mode != VOIDmode);
1276 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1277 if (size % align != 0)
1278 size = CEIL (size, align) * align;
1279 apply_args_reg_offset[regno] = size;
1280 size += GET_MODE_SIZE (mode);
1281 apply_args_mode[regno] = mode;
1283 else
1285 apply_args_mode[regno] = VOIDmode;
1286 apply_args_reg_offset[regno] = 0;
1289 return size;
1292 /* Return the size required for the block returned by __builtin_apply,
1293 and initialize apply_result_mode. */
1295 static int
1296 apply_result_size (void)
1298 static int size = -1;
1299 int align, regno;
1300 enum machine_mode mode;
1302 /* The values computed by this function never change. */
1303 if (size < 0)
1305 size = 0;
1307 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1308 if (FUNCTION_VALUE_REGNO_P (regno))
1310 mode = reg_raw_mode[regno];
1312 gcc_assert (mode != VOIDmode);
1314 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1315 if (size % align != 0)
1316 size = CEIL (size, align) * align;
1317 size += GET_MODE_SIZE (mode);
1318 apply_result_mode[regno] = mode;
1320 else
1321 apply_result_mode[regno] = VOIDmode;
1323 /* Allow targets that use untyped_call and untyped_return to override
1324 the size so that machine-specific information can be stored here. */
1325 #ifdef APPLY_RESULT_SIZE
1326 size = APPLY_RESULT_SIZE;
1327 #endif
1329 return size;
1332 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1333 /* Create a vector describing the result block RESULT. If SAVEP is true,
1334 the result block is used to save the values; otherwise it is used to
1335 restore the values. */
1337 static rtx
1338 result_vector (int savep, rtx result)
1340 int regno, size, align, nelts;
1341 enum machine_mode mode;
1342 rtx reg, mem;
1343 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1345 size = nelts = 0;
1346 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1347 if ((mode = apply_result_mode[regno]) != VOIDmode)
1349 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1350 if (size % align != 0)
1351 size = CEIL (size, align) * align;
1352 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1353 mem = adjust_address (result, mode, size);
1354 savevec[nelts++] = (savep
1355 ? gen_rtx_SET (VOIDmode, mem, reg)
1356 : gen_rtx_SET (VOIDmode, reg, mem));
1357 size += GET_MODE_SIZE (mode);
1359 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1361 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1363 /* Save the state required to perform an untyped call with the same
1364 arguments as were passed to the current function. */
1366 static rtx
1367 expand_builtin_apply_args_1 (void)
1369 rtx registers, tem;
1370 int size, align, regno;
1371 enum machine_mode mode;
1372 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1374 /* Create a block where the arg-pointer, structure value address,
1375 and argument registers can be saved. */
1376 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1378 /* Walk past the arg-pointer and structure value address. */
1379 size = GET_MODE_SIZE (Pmode);
1380 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1381 size += GET_MODE_SIZE (Pmode);
1383 /* Save each register used in calling a function to the block. */
1384 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1385 if ((mode = apply_args_mode[regno]) != VOIDmode)
1387 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1388 if (size % align != 0)
1389 size = CEIL (size, align) * align;
1391 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1393 emit_move_insn (adjust_address (registers, mode, size), tem);
1394 size += GET_MODE_SIZE (mode);
1397 /* Save the arg pointer to the block. */
1398 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1399 #ifdef STACK_GROWS_DOWNWARD
1400 /* We need the pointer as the caller actually passed the arguments to us,
1401 not as we might have pretended they were passed. Make sure it's a
1402 valid operand, as emit_move_insn isn't expected to handle a PLUS. */
1403 tem
1404 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1405 NULL_RTX);
1406 #endif
1407 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1409 size = GET_MODE_SIZE (Pmode);
1411 /* Save the structure value address unless this is passed as an
1412 "invisible" first argument. */
1413 if (struct_incoming_value)
1415 emit_move_insn (adjust_address (registers, Pmode, size),
1416 copy_to_reg (struct_incoming_value));
1417 size += GET_MODE_SIZE (Pmode);
1420 /* Return the address of the block. */
1421 return copy_addr_to_reg (XEXP (registers, 0));
1424 /* __builtin_apply_args returns block of memory allocated on
1425 the stack into which is stored the arg pointer, structure
1426 value address, static chain, and all the registers that might
1427 possibly be used in performing a function call. The code is
1428 moved to the start of the function so the incoming values are
1429 saved. */
1431 static rtx
1432 expand_builtin_apply_args (void)
1434 /* Don't do __builtin_apply_args more than once in a function.
1435 Save the result of the first call and reuse it. */
1436 if (apply_args_value != 0)
1437 return apply_args_value;
1439 /* When this function is called, it means that registers must be
1440 saved on entry to this function. So we migrate the
1441 call to the first insn of this function. */
1442 rtx temp;
1443 rtx seq;
1445 start_sequence ();
1446 temp = expand_builtin_apply_args_1 ();
1447 seq = get_insns ();
1448 end_sequence ();
1450 apply_args_value = temp;
1452 /* Put the insns after the NOTE that starts the function.
1453 If this is inside a start_sequence, make the outer-level insn
1454 chain current, so the code is placed at the start of the
1455 function. If internal_arg_pointer is a non-virtual pseudo,
1456 it needs to be placed after the function that initializes
1457 that pseudo. */
1458 push_topmost_sequence ();
1459 if (REG_P (crtl->args.internal_arg_pointer)
1460 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1461 emit_insn_before (seq, parm_birth_insn);
1462 else
1463 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1464 pop_topmost_sequence ();
1465 return temp;
1469 /* Perform an untyped call and save the state required to perform an
1470 untyped return of whatever value was returned by the given function. */
1472 static rtx
1473 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1475 int size, align, regno;
1476 enum machine_mode mode;
1477 rtx incoming_args, result, reg, dest, src, call_insn;
1478 rtx old_stack_level = 0;
1479 rtx call_fusage = 0;
1480 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1482 arguments = convert_memory_address (Pmode, arguments);
1484 /* Create a block where the return registers can be saved. */
1485 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1487 /* Fetch the arg pointer from the ARGUMENTS block. */
1488 incoming_args = gen_reg_rtx (Pmode);
1489 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1490 #ifndef STACK_GROWS_DOWNWARD
1491 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1492 incoming_args, 0, OPTAB_LIB_WIDEN);
1493 #endif
1495 /* Push a new argument block and copy the arguments. Do not allow
1496 the (potential) memcpy call below to interfere with our stack
1497 manipulations. */
1498 do_pending_stack_adjust ();
1499 NO_DEFER_POP;
1501 /* Save the stack with nonlocal if available. */
1502 #ifdef HAVE_save_stack_nonlocal
1503 if (HAVE_save_stack_nonlocal)
1504 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1505 else
1506 #endif
1507 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1509 /* Allocate a block of memory onto the stack and copy the memory
1510 arguments to the outgoing arguments address. */
1511 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1513 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1514 may have already set current_function_calls_alloca to true.
1515 current_function_calls_alloca won't be set if argsize is zero,
1516 so we have to guarantee need_drap is true here. */
1517 if (SUPPORTS_STACK_ALIGNMENT)
1518 crtl->need_drap = true;
1520 dest = virtual_outgoing_args_rtx;
1521 #ifndef STACK_GROWS_DOWNWARD
1522 if (GET_CODE (argsize) == CONST_INT)
1523 dest = plus_constant (dest, -INTVAL (argsize));
1524 else
1525 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1526 #endif
1527 dest = gen_rtx_MEM (BLKmode, dest);
1528 set_mem_align (dest, PARM_BOUNDARY);
1529 src = gen_rtx_MEM (BLKmode, incoming_args);
1530 set_mem_align (src, PARM_BOUNDARY);
1531 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1533 /* Refer to the argument block. */
1534 apply_args_size ();
1535 arguments = gen_rtx_MEM (BLKmode, arguments);
1536 set_mem_align (arguments, PARM_BOUNDARY);
1538 /* Walk past the arg-pointer and structure value address. */
1539 size = GET_MODE_SIZE (Pmode);
1540 if (struct_value)
1541 size += GET_MODE_SIZE (Pmode);
1543 /* Restore each of the registers previously saved. Make USE insns
1544 for each of these registers for use in making the call. */
1545 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1546 if ((mode = apply_args_mode[regno]) != VOIDmode)
1548 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1549 if (size % align != 0)
1550 size = CEIL (size, align) * align;
1551 reg = gen_rtx_REG (mode, regno);
1552 emit_move_insn (reg, adjust_address (arguments, mode, size));
1553 use_reg (&call_fusage, reg);
1554 size += GET_MODE_SIZE (mode);
1557 /* Restore the structure value address unless this is passed as an
1558 "invisible" first argument. */
1559 size = GET_MODE_SIZE (Pmode);
1560 if (struct_value)
1562 rtx value = gen_reg_rtx (Pmode);
1563 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1564 emit_move_insn (struct_value, value);
1565 if (REG_P (struct_value))
1566 use_reg (&call_fusage, struct_value);
1567 size += GET_MODE_SIZE (Pmode);
1570 /* All arguments and registers used for the call are set up by now! */
1571 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1573 /* Ensure the address is valid. A SYMBOL_REF is already valid, so there is
1574 no need, and we don't want to load it into a register as an optimization,
1575 because prepare_call_address already did so if it should be done. */
1576 if (GET_CODE (function) != SYMBOL_REF)
1577 function = memory_address (FUNCTION_MODE, function);
1579 /* Generate the actual call instruction and save the return value. */
1580 #ifdef HAVE_untyped_call
1581 if (HAVE_untyped_call)
1582 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1583 result, result_vector (1, result)));
1584 else
1585 #endif
1586 #ifdef HAVE_call_value
1587 if (HAVE_call_value)
1589 rtx valreg = 0;
1591 /* Locate the unique return register. It is not possible to
1592 express a call that sets more than one return register using
1593 call_value; use untyped_call for that. In fact, untyped_call
1594 only needs to save the return registers in the given block. */
1595 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1596 if ((mode = apply_result_mode[regno]) != VOIDmode)
1598 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1600 valreg = gen_rtx_REG (mode, regno);
1603 emit_call_insn (GEN_CALL_VALUE (valreg,
1604 gen_rtx_MEM (FUNCTION_MODE, function),
1605 const0_rtx, NULL_RTX, const0_rtx));
1607 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1609 else
1610 #endif
1611 gcc_unreachable ();
1613 /* Find the CALL insn we just emitted, and attach the register usage
1614 information. */
1615 call_insn = last_call_insn ();
1616 add_function_usage_to (call_insn, call_fusage);
1618 /* Restore the stack. */
1619 #ifdef HAVE_save_stack_nonlocal
1620 if (HAVE_save_stack_nonlocal)
1621 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1622 else
1623 #endif
1624 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1626 OK_DEFER_POP;
1628 /* Return the address of the result block. */
1629 result = copy_addr_to_reg (XEXP (result, 0));
1630 return convert_memory_address (ptr_mode, result);
1633 /* Perform an untyped return. */
1635 static void
1636 expand_builtin_return (rtx result)
1638 int size, align, regno;
1639 enum machine_mode mode;
1640 rtx reg;
1641 rtx call_fusage = 0;
1643 result = convert_memory_address (Pmode, result);
1645 apply_result_size ();
1646 result = gen_rtx_MEM (BLKmode, result);
1648 #ifdef HAVE_untyped_return
1649 if (HAVE_untyped_return)
1651 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1652 emit_barrier ();
1653 return;
1655 #endif
1657 /* Restore the return value and note that each value is used. */
1658 size = 0;
1659 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1660 if ((mode = apply_result_mode[regno]) != VOIDmode)
1662 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1663 if (size % align != 0)
1664 size = CEIL (size, align) * align;
1665 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1666 emit_move_insn (reg, adjust_address (result, mode, size));
1668 push_to_sequence (call_fusage);
1669 emit_use (reg);
1670 call_fusage = get_insns ();
1671 end_sequence ();
1672 size += GET_MODE_SIZE (mode);
1675 /* Put the USE insns before the return. */
1676 emit_insn (call_fusage);
1678 /* Return whatever values were restored by jumping directly to the end
1679 of the function. */
1680 expand_naked_return ();
1683 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1685 static enum type_class
1686 type_to_class (tree type)
1688 switch (TREE_CODE (type))
1690 case VOID_TYPE: return void_type_class;
1691 case INTEGER_TYPE: return integer_type_class;
1692 case ENUMERAL_TYPE: return enumeral_type_class;
1693 case BOOLEAN_TYPE: return boolean_type_class;
1694 case POINTER_TYPE: return pointer_type_class;
1695 case REFERENCE_TYPE: return reference_type_class;
1696 case OFFSET_TYPE: return offset_type_class;
1697 case REAL_TYPE: return real_type_class;
1698 case COMPLEX_TYPE: return complex_type_class;
1699 case FUNCTION_TYPE: return function_type_class;
1700 case METHOD_TYPE: return method_type_class;
1701 case RECORD_TYPE: return record_type_class;
1702 case UNION_TYPE:
1703 case QUAL_UNION_TYPE: return union_type_class;
1704 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1705 ? string_type_class : array_type_class);
1706 case LANG_TYPE: return lang_type_class;
1707 default: return no_type_class;
1711 /* Expand a call EXP to __builtin_classify_type. */
1713 static rtx
1714 expand_builtin_classify_type (tree exp)
1716 if (call_expr_nargs (exp))
1717 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1718 return GEN_INT (no_type_class);
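/* For example, in a call such as

     __builtin_classify_type (3.14)

   the argument's type has TREE_CODE REAL_TYPE, so the mapping above yields
   GEN_INT (real_type_class) and the whole call becomes a compile-time
   constant. */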
1721 /* This helper macro, meant to be used in mathfn_built_in below,
1722 determines which among a set of three builtin math functions is
1723 appropriate for a given type mode. The `F' and `L' cases are
1724 automatically generated from the `double' case. */
1725 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1726 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1727 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1728 fcodel = BUILT_IN_MATHFN##L ; break;
1729 /* Similar to above, but appends _R after any F/L suffix. */
1730 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1731 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1732 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1733 fcodel = BUILT_IN_MATHFN##L_R ; break;
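/* For reference, one use of the helper, e.g.

     CASE_MATHFN (BUILT_IN_SIN)

   expands via token pasting to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so each CASE_MATHFN entry below covers the double, float and long double
   variants of one math builtin in a single case. */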
1735 /* Return the mathematical function equivalent to FN but operating directly
1736 on TYPE, if available. If IMPLICIT is true find the function in
1737 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1738 can't do the conversion, return zero. */
1740 static tree
1741 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1743 tree const *const fn_arr
1744 = implicit ? implicit_built_in_decls : built_in_decls;
1745 enum built_in_function fcode, fcodef, fcodel;
1747 switch (fn)
1749 CASE_MATHFN (BUILT_IN_ACOS)
1750 CASE_MATHFN (BUILT_IN_ACOSH)
1751 CASE_MATHFN (BUILT_IN_ASIN)
1752 CASE_MATHFN (BUILT_IN_ASINH)
1753 CASE_MATHFN (BUILT_IN_ATAN)
1754 CASE_MATHFN (BUILT_IN_ATAN2)
1755 CASE_MATHFN (BUILT_IN_ATANH)
1756 CASE_MATHFN (BUILT_IN_CBRT)
1757 CASE_MATHFN (BUILT_IN_CEIL)
1758 CASE_MATHFN (BUILT_IN_CEXPI)
1759 CASE_MATHFN (BUILT_IN_COPYSIGN)
1760 CASE_MATHFN (BUILT_IN_COS)
1761 CASE_MATHFN (BUILT_IN_COSH)
1762 CASE_MATHFN (BUILT_IN_DREM)
1763 CASE_MATHFN (BUILT_IN_ERF)
1764 CASE_MATHFN (BUILT_IN_ERFC)
1765 CASE_MATHFN (BUILT_IN_EXP)
1766 CASE_MATHFN (BUILT_IN_EXP10)
1767 CASE_MATHFN (BUILT_IN_EXP2)
1768 CASE_MATHFN (BUILT_IN_EXPM1)
1769 CASE_MATHFN (BUILT_IN_FABS)
1770 CASE_MATHFN (BUILT_IN_FDIM)
1771 CASE_MATHFN (BUILT_IN_FLOOR)
1772 CASE_MATHFN (BUILT_IN_FMA)
1773 CASE_MATHFN (BUILT_IN_FMAX)
1774 CASE_MATHFN (BUILT_IN_FMIN)
1775 CASE_MATHFN (BUILT_IN_FMOD)
1776 CASE_MATHFN (BUILT_IN_FREXP)
1777 CASE_MATHFN (BUILT_IN_GAMMA)
1778 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1779 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1780 CASE_MATHFN (BUILT_IN_HYPOT)
1781 CASE_MATHFN (BUILT_IN_ILOGB)
1782 CASE_MATHFN (BUILT_IN_INF)
1783 CASE_MATHFN (BUILT_IN_ISINF)
1784 CASE_MATHFN (BUILT_IN_J0)
1785 CASE_MATHFN (BUILT_IN_J1)
1786 CASE_MATHFN (BUILT_IN_JN)
1787 CASE_MATHFN (BUILT_IN_LCEIL)
1788 CASE_MATHFN (BUILT_IN_LDEXP)
1789 CASE_MATHFN (BUILT_IN_LFLOOR)
1790 CASE_MATHFN (BUILT_IN_LGAMMA)
1791 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1792 CASE_MATHFN (BUILT_IN_LLCEIL)
1793 CASE_MATHFN (BUILT_IN_LLFLOOR)
1794 CASE_MATHFN (BUILT_IN_LLRINT)
1795 CASE_MATHFN (BUILT_IN_LLROUND)
1796 CASE_MATHFN (BUILT_IN_LOG)
1797 CASE_MATHFN (BUILT_IN_LOG10)
1798 CASE_MATHFN (BUILT_IN_LOG1P)
1799 CASE_MATHFN (BUILT_IN_LOG2)
1800 CASE_MATHFN (BUILT_IN_LOGB)
1801 CASE_MATHFN (BUILT_IN_LRINT)
1802 CASE_MATHFN (BUILT_IN_LROUND)
1803 CASE_MATHFN (BUILT_IN_MODF)
1804 CASE_MATHFN (BUILT_IN_NAN)
1805 CASE_MATHFN (BUILT_IN_NANS)
1806 CASE_MATHFN (BUILT_IN_NEARBYINT)
1807 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1808 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1809 CASE_MATHFN (BUILT_IN_POW)
1810 CASE_MATHFN (BUILT_IN_POWI)
1811 CASE_MATHFN (BUILT_IN_POW10)
1812 CASE_MATHFN (BUILT_IN_REMAINDER)
1813 CASE_MATHFN (BUILT_IN_REMQUO)
1814 CASE_MATHFN (BUILT_IN_RINT)
1815 CASE_MATHFN (BUILT_IN_ROUND)
1816 CASE_MATHFN (BUILT_IN_SCALB)
1817 CASE_MATHFN (BUILT_IN_SCALBLN)
1818 CASE_MATHFN (BUILT_IN_SCALBN)
1819 CASE_MATHFN (BUILT_IN_SIGNBIT)
1820 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1821 CASE_MATHFN (BUILT_IN_SIN)
1822 CASE_MATHFN (BUILT_IN_SINCOS)
1823 CASE_MATHFN (BUILT_IN_SINH)
1824 CASE_MATHFN (BUILT_IN_SQRT)
1825 CASE_MATHFN (BUILT_IN_TAN)
1826 CASE_MATHFN (BUILT_IN_TANH)
1827 CASE_MATHFN (BUILT_IN_TGAMMA)
1828 CASE_MATHFN (BUILT_IN_TRUNC)
1829 CASE_MATHFN (BUILT_IN_Y0)
1830 CASE_MATHFN (BUILT_IN_Y1)
1831 CASE_MATHFN (BUILT_IN_YN)
1833 default:
1834 return NULL_TREE;
1837 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1838 return fn_arr[fcode];
1839 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1840 return fn_arr[fcodef];
1841 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1842 return fn_arr[fcodel];
1843 else
1844 return NULL_TREE;
1847 /* Like mathfn_built_in_1(), but always use the implicit array. */
1849 tree
1850 mathfn_built_in (tree type, enum built_in_function fn)
1852 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
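/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) selects the
   float variant and returns implicit_built_in_decls[BUILT_IN_SINF], which
   may be NULL_TREE when the runtime is not known to provide sinf. */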
1855 /* If errno must be maintained, expand the RTL to check if the result,
1856 TARGET, of a built-in function call, EXP, is NaN, and if so set
1857 errno to EDOM. */
1859 static void
1860 expand_errno_check (tree exp, rtx target)
1862 rtx lab = gen_label_rtx ();
1864 /* Test the result; if it is NaN (the only value that compares unequal
1865 to itself), set errno=EDOM because the argument was not in the domain. */
1866 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1867 0, lab);
1869 #ifdef TARGET_EDOM
1870 /* If this built-in doesn't throw an exception, set errno directly. */
1871 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1873 #ifdef GEN_ERRNO_RTX
1874 rtx errno_rtx = GEN_ERRNO_RTX;
1875 #else
1876 rtx errno_rtx
1877 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1878 #endif
1879 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1880 emit_label (lab);
1881 return;
1883 #endif
1885 /* Make sure the library call isn't expanded as a tail call. */
1886 CALL_EXPR_TAILCALL (exp) = 0;
1888 /* We can't set errno=EDOM directly; let the library call do it.
1889 Pop the arguments right away in case the call gets deleted. */
1890 NO_DEFER_POP;
1891 expand_call (exp, target, 0);
1892 OK_DEFER_POP;
1893 emit_label (lab);
1896 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1897 Return NULL_RTX if a normal call should be emitted rather than expanding
1898 the function in-line. EXP is the expression that is a call to the builtin
1899 function; if convenient, the result should be placed in TARGET.
1900 SUBTARGET may be used as the target for computing one of EXP's operands. */
1902 static rtx
1903 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1905 optab builtin_optab;
1906 rtx op0, insns, before_call;
1907 tree fndecl = get_callee_fndecl (exp);
1908 enum machine_mode mode;
1909 bool errno_set = false;
1910 tree arg;
1912 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1913 return NULL_RTX;
1915 arg = CALL_EXPR_ARG (exp, 0);
1917 switch (DECL_FUNCTION_CODE (fndecl))
1919 CASE_FLT_FN (BUILT_IN_SQRT):
1920 errno_set = ! tree_expr_nonnegative_p (arg);
1921 builtin_optab = sqrt_optab;
1922 break;
1923 CASE_FLT_FN (BUILT_IN_EXP):
1924 errno_set = true; builtin_optab = exp_optab; break;
1925 CASE_FLT_FN (BUILT_IN_EXP10):
1926 CASE_FLT_FN (BUILT_IN_POW10):
1927 errno_set = true; builtin_optab = exp10_optab; break;
1928 CASE_FLT_FN (BUILT_IN_EXP2):
1929 errno_set = true; builtin_optab = exp2_optab; break;
1930 CASE_FLT_FN (BUILT_IN_EXPM1):
1931 errno_set = true; builtin_optab = expm1_optab; break;
1932 CASE_FLT_FN (BUILT_IN_LOGB):
1933 errno_set = true; builtin_optab = logb_optab; break;
1934 CASE_FLT_FN (BUILT_IN_LOG):
1935 errno_set = true; builtin_optab = log_optab; break;
1936 CASE_FLT_FN (BUILT_IN_LOG10):
1937 errno_set = true; builtin_optab = log10_optab; break;
1938 CASE_FLT_FN (BUILT_IN_LOG2):
1939 errno_set = true; builtin_optab = log2_optab; break;
1940 CASE_FLT_FN (BUILT_IN_LOG1P):
1941 errno_set = true; builtin_optab = log1p_optab; break;
1942 CASE_FLT_FN (BUILT_IN_ASIN):
1943 builtin_optab = asin_optab; break;
1944 CASE_FLT_FN (BUILT_IN_ACOS):
1945 builtin_optab = acos_optab; break;
1946 CASE_FLT_FN (BUILT_IN_TAN):
1947 builtin_optab = tan_optab; break;
1948 CASE_FLT_FN (BUILT_IN_ATAN):
1949 builtin_optab = atan_optab; break;
1950 CASE_FLT_FN (BUILT_IN_FLOOR):
1951 builtin_optab = floor_optab; break;
1952 CASE_FLT_FN (BUILT_IN_CEIL):
1953 builtin_optab = ceil_optab; break;
1954 CASE_FLT_FN (BUILT_IN_TRUNC):
1955 builtin_optab = btrunc_optab; break;
1956 CASE_FLT_FN (BUILT_IN_ROUND):
1957 builtin_optab = round_optab; break;
1958 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1959 builtin_optab = nearbyint_optab;
1960 if (flag_trapping_math)
1961 break;
1962 /* Else fall through and expand as rint. */
1963 CASE_FLT_FN (BUILT_IN_RINT):
1964 builtin_optab = rint_optab; break;
1965 default:
1966 gcc_unreachable ();
1969 /* Make a suitable register to place result in. */
1970 mode = TYPE_MODE (TREE_TYPE (exp));
1972 if (! flag_errno_math || ! HONOR_NANS (mode))
1973 errno_set = false;
1975 /* Before working hard, check whether the instruction is available. */
1976 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1978 target = gen_reg_rtx (mode);
1980 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1981 need to expand the argument again. This way, we will not perform
1982 side-effects more than once. */
1983 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1985 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1987 start_sequence ();
1989 /* Compute into TARGET.
1990 Set TARGET to wherever the result comes back. */
1991 target = expand_unop (mode, builtin_optab, op0, target, 0);
1993 if (target != 0)
1995 if (errno_set)
1996 expand_errno_check (exp, target);
1998 /* Output the entire sequence. */
1999 insns = get_insns ();
2000 end_sequence ();
2001 emit_insn (insns);
2002 return target;
2005 /* If we were unable to expand via the builtin, stop the sequence
2006 (without outputting the insns) and call the library function
2007 with the stabilized argument list. */
2008 end_sequence ();
2011 before_call = get_last_insn ();
2013 return expand_call (exp, target, target == const0_rtx);
2016 /* Expand a call to the builtin binary math functions (pow, atan2, fmod, etc.).
2017 Return NULL_RTX if a normal call should be emitted rather than expanding the
2018 function in-line. EXP is the expression that is a call to the builtin
2019 function; if convenient, the result should be placed in TARGET.
2020 SUBTARGET may be used as the target for computing one of EXP's
2021 operands. */
2023 static rtx
2024 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2026 optab builtin_optab;
2027 rtx op0, op1, insns;
2028 int op1_type = REAL_TYPE;
2029 tree fndecl = get_callee_fndecl (exp);
2030 tree arg0, arg1;
2031 enum machine_mode mode;
2032 bool errno_set = true;
2034 switch (DECL_FUNCTION_CODE (fndecl))
2036 CASE_FLT_FN (BUILT_IN_SCALBN):
2037 CASE_FLT_FN (BUILT_IN_SCALBLN):
2038 CASE_FLT_FN (BUILT_IN_LDEXP):
2039 op1_type = INTEGER_TYPE;
2040 default:
2041 break;
2044 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2045 return NULL_RTX;
2047 arg0 = CALL_EXPR_ARG (exp, 0);
2048 arg1 = CALL_EXPR_ARG (exp, 1);
2050 switch (DECL_FUNCTION_CODE (fndecl))
2052 CASE_FLT_FN (BUILT_IN_POW):
2053 builtin_optab = pow_optab; break;
2054 CASE_FLT_FN (BUILT_IN_ATAN2):
2055 builtin_optab = atan2_optab; break;
2056 CASE_FLT_FN (BUILT_IN_SCALB):
2057 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2058 return 0;
2059 builtin_optab = scalb_optab; break;
2060 CASE_FLT_FN (BUILT_IN_SCALBN):
2061 CASE_FLT_FN (BUILT_IN_SCALBLN):
2062 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2063 return 0;
2064 /* Fall through... */
2065 CASE_FLT_FN (BUILT_IN_LDEXP):
2066 builtin_optab = ldexp_optab; break;
2067 CASE_FLT_FN (BUILT_IN_FMOD):
2068 builtin_optab = fmod_optab; break;
2069 CASE_FLT_FN (BUILT_IN_REMAINDER):
2070 CASE_FLT_FN (BUILT_IN_DREM):
2071 builtin_optab = remainder_optab; break;
2072 default:
2073 gcc_unreachable ();
2076 /* Make a suitable register to place result in. */
2077 mode = TYPE_MODE (TREE_TYPE (exp));
2079 /* Before working hard, check whether the instruction is available. */
2080 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2081 return NULL_RTX;
2083 target = gen_reg_rtx (mode);
2085 if (! flag_errno_math || ! HONOR_NANS (mode))
2086 errno_set = false;
2088 /* Always stabilize the argument list. */
2089 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2090 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2092 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2093 op1 = expand_normal (arg1);
2095 start_sequence ();
2097 /* Compute into TARGET.
2098 Set TARGET to wherever the result comes back. */
2099 target = expand_binop (mode, builtin_optab, op0, op1,
2100 target, 0, OPTAB_DIRECT);
2102 /* If we were unable to expand via the builtin, stop the sequence
2103 (without outputting the insns) and call the library function
2104 with the stabilized argument list. */
2105 if (target == 0)
2107 end_sequence ();
2108 return expand_call (exp, target, target == const0_rtx);
2111 if (errno_set)
2112 expand_errno_check (exp, target);
2114 /* Output the entire sequence. */
2115 insns = get_insns ();
2116 end_sequence ();
2117 emit_insn (insns);
2119 return target;
2122 /* Expand a call to the builtin sin and cos math functions.
2123 Return NULL_RTX if a normal call should be emitted rather than expanding the
2124 function in-line. EXP is the expression that is a call to the builtin
2125 function; if convenient, the result should be placed in TARGET.
2126 SUBTARGET may be used as the target for computing one of EXP's
2127 operands. */
2129 static rtx
2130 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2132 optab builtin_optab;
2133 rtx op0, insns;
2134 tree fndecl = get_callee_fndecl (exp);
2135 enum machine_mode mode;
2136 tree arg;
2138 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2139 return NULL_RTX;
2141 arg = CALL_EXPR_ARG (exp, 0);
2143 switch (DECL_FUNCTION_CODE (fndecl))
2145 CASE_FLT_FN (BUILT_IN_SIN):
2146 CASE_FLT_FN (BUILT_IN_COS):
2147 builtin_optab = sincos_optab; break;
2148 default:
2149 gcc_unreachable ();
2152 /* Make a suitable register to place result in. */
2153 mode = TYPE_MODE (TREE_TYPE (exp));
2155 /* Check if the sincos insn is available; otherwise fall back
2156 to the sin or cos insn. */
2157 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2158 switch (DECL_FUNCTION_CODE (fndecl))
2160 CASE_FLT_FN (BUILT_IN_SIN):
2161 builtin_optab = sin_optab; break;
2162 CASE_FLT_FN (BUILT_IN_COS):
2163 builtin_optab = cos_optab; break;
2164 default:
2165 gcc_unreachable ();
2168 /* Before working hard, check whether the instruction is available. */
2169 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2171 target = gen_reg_rtx (mode);
2173 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2174 need to expand the argument again. This way, we will not perform
2175 side-effects more than once. */
2176 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2178 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2180 start_sequence ();
2182 /* Compute into TARGET.
2183 Set TARGET to wherever the result comes back. */
2184 if (builtin_optab == sincos_optab)
2186 int result;
2188 switch (DECL_FUNCTION_CODE (fndecl))
2190 CASE_FLT_FN (BUILT_IN_SIN):
2191 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2192 break;
2193 CASE_FLT_FN (BUILT_IN_COS):
2194 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2195 break;
2196 default:
2197 gcc_unreachable ();
2199 gcc_assert (result);
2201 else
2203 target = expand_unop (mode, builtin_optab, op0, target, 0);
2206 if (target != 0)
2208 /* Output the entire sequence. */
2209 insns = get_insns ();
2210 end_sequence ();
2211 emit_insn (insns);
2212 return target;
2215 /* If we were unable to expand via the builtin, stop the sequence
2216 (without outputting the insns) and call the library function
2217 with the stabilized argument list. */
2218 end_sequence ();
2221 target = expand_call (exp, target, target == const0_rtx);
2223 return target;
2226 /* Expand a call to one of the builtin math functions that operate on
2227 a floating point argument and output an integer result (ilogb, isinf,
2228 isnan, etc).
2229 Return 0 if a normal call should be emitted rather than expanding the
2230 function in-line. EXP is the expression that is a call to the builtin
2231 function; if convenient, the result should be placed in TARGET.
2232 SUBTARGET may be used as the target for computing one of EXP's operands. */
2234 static rtx
2235 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2237 optab builtin_optab = 0;
2238 enum insn_code icode = CODE_FOR_nothing;
2239 rtx op0;
2240 tree fndecl = get_callee_fndecl (exp);
2241 enum machine_mode mode;
2242 bool errno_set = false;
2243 tree arg;
2245 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2246 return NULL_RTX;
2248 arg = CALL_EXPR_ARG (exp, 0);
2250 switch (DECL_FUNCTION_CODE (fndecl))
2252 CASE_FLT_FN (BUILT_IN_ILOGB):
2253 errno_set = true; builtin_optab = ilogb_optab; break;
2254 CASE_FLT_FN (BUILT_IN_ISINF):
2255 builtin_optab = isinf_optab; break;
2256 case BUILT_IN_ISNORMAL:
2257 case BUILT_IN_ISFINITE:
2258 CASE_FLT_FN (BUILT_IN_FINITE):
2259 /* These builtins have no optabs (yet). */
2260 break;
2261 default:
2262 gcc_unreachable ();
2265 /* There's no easy way to detect the case where we need to set EDOM. */
2266 if (flag_errno_math && errno_set)
2267 return NULL_RTX;
2269 /* Optab mode depends on the mode of the input argument. */
2270 mode = TYPE_MODE (TREE_TYPE (arg));
2272 if (builtin_optab)
2273 icode = optab_handler (builtin_optab, mode)->insn_code;
2275 /* Before working hard, check whether the instruction is available. */
2276 if (icode != CODE_FOR_nothing)
2278 /* Make a suitable register to place result in. */
2279 if (!target
2280 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2281 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2283 gcc_assert (insn_data[icode].operand[0].predicate
2284 (target, GET_MODE (target)));
2286 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2287 need to expand the argument again. This way, we will not perform
2288 side-effects more than once. */
2289 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2291 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2293 if (mode != GET_MODE (op0))
2294 op0 = convert_to_mode (mode, op0, 0);
2296 /* Compute into TARGET.
2297 Set TARGET to wherever the result comes back. */
2298 emit_unop_insn (icode, target, op0, UNKNOWN);
2299 return target;
2302 /* If there is no optab, try generic code. */
2303 switch (DECL_FUNCTION_CODE (fndecl))
2305 tree result;
2307 CASE_FLT_FN (BUILT_IN_ISINF):
2309 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2310 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2311 tree const type = TREE_TYPE (arg);
2312 REAL_VALUE_TYPE r;
2313 char buf[128];
2315 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2316 real_from_string (&r, buf);
2317 result = build_call_expr (isgr_fn, 2,
2318 fold_build1 (ABS_EXPR, type, arg),
2319 build_real (type, r));
2320 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2322 CASE_FLT_FN (BUILT_IN_FINITE):
2323 case BUILT_IN_ISFINITE:
2325 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2326 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2327 tree const type = TREE_TYPE (arg);
2328 REAL_VALUE_TYPE r;
2329 char buf[128];
2331 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2332 real_from_string (&r, buf);
2333 result = build_call_expr (isle_fn, 2,
2334 fold_build1 (ABS_EXPR, type, arg),
2335 build_real (type, r));
2336 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2338 case BUILT_IN_ISNORMAL:
2340 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2341 islessequal(fabs(x),DBL_MAX). */
2342 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2343 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2344 tree const type = TREE_TYPE (arg);
2345 REAL_VALUE_TYPE rmax, rmin;
2346 char buf[128];
2348 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2349 real_from_string (&rmax, buf);
2350 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2351 real_from_string (&rmin, buf);
2352 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2353 result = build_call_expr (isle_fn, 2, arg,
2354 build_real (type, rmax));
2355 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2356 build_call_expr (isge_fn, 2, arg,
2357 build_real (type, rmin)));
2358 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2360 default:
2361 break;
2364 target = expand_call (exp, target, target == const0_rtx);
2366 return target;
2369 /* Expand a call to the builtin sincos math function.
2370 Return NULL_RTX if a normal call should be emitted rather than expanding the
2371 function in-line. EXP is the expression that is a call to the builtin
2372 function. */
2374 static rtx
2375 expand_builtin_sincos (tree exp)
2377 rtx op0, op1, op2, target1, target2;
2378 enum machine_mode mode;
2379 tree arg, sinp, cosp;
2380 int result;
2382 if (!validate_arglist (exp, REAL_TYPE,
2383 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2384 return NULL_RTX;
2386 arg = CALL_EXPR_ARG (exp, 0);
2387 sinp = CALL_EXPR_ARG (exp, 1);
2388 cosp = CALL_EXPR_ARG (exp, 2);
2390 /* Make a suitable register to place result in. */
2391 mode = TYPE_MODE (TREE_TYPE (arg));
2393 /* Check if sincos insn is available, otherwise emit the call. */
2394 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2395 return NULL_RTX;
2397 target1 = gen_reg_rtx (mode);
2398 target2 = gen_reg_rtx (mode);
2400 op0 = expand_normal (arg);
2401 op1 = expand_normal (build_fold_indirect_ref (sinp));
2402 op2 = expand_normal (build_fold_indirect_ref (cosp));
2404 /* Compute into target1 and target2.
2405 Set them to wherever the results come back. */
2406 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2407 gcc_assert (result);
2409 /* Move target1 and target2 to the memory locations indicated
2410 by op1 and op2. */
2411 emit_move_insn (op1, target1);
2412 emit_move_insn (op2, target2);
2414 return const0_rtx;
2417 /* Expand a call to the internal cexpi builtin to the sincos math function.
2418 EXP is the expression that is a call to the builtin function; if convenient,
2419 the result should be placed in TARGET. SUBTARGET may be used as the target
2420 for computing one of EXP's operands. */
2422 static rtx
2423 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2425 tree fndecl = get_callee_fndecl (exp);
2426 tree arg, type;
2427 enum machine_mode mode;
2428 rtx op0, op1, op2;
2430 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2431 return NULL_RTX;
2433 arg = CALL_EXPR_ARG (exp, 0);
2434 type = TREE_TYPE (arg);
2435 mode = TYPE_MODE (TREE_TYPE (arg));
2437 /* Try expanding via a sincos optab; fall back to emitting a libcall
2438 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2439 is only generated from sincos or cexp, or when either of them is available. */
2440 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2442 op1 = gen_reg_rtx (mode);
2443 op2 = gen_reg_rtx (mode);
2445 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2447 /* Compute into op1 and op2. */
2448 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2450 else if (TARGET_HAS_SINCOS)
2452 tree call, fn = NULL_TREE;
2453 tree top1, top2;
2454 rtx op1a, op2a;
2456 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2457 fn = built_in_decls[BUILT_IN_SINCOSF];
2458 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2459 fn = built_in_decls[BUILT_IN_SINCOS];
2460 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2461 fn = built_in_decls[BUILT_IN_SINCOSL];
2462 else
2463 gcc_unreachable ();
2465 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2466 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2467 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2468 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2469 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2470 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2472 /* Make sure not to fold the sincos call again. */
2473 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2474 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2475 call, 3, arg, top1, top2));
2477 else
2479 tree call, fn = NULL_TREE, narg;
2480 tree ctype = build_complex_type (type);
2482 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2483 fn = built_in_decls[BUILT_IN_CEXPF];
2484 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2485 fn = built_in_decls[BUILT_IN_CEXP];
2486 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2487 fn = built_in_decls[BUILT_IN_CEXPL];
2488 else
2489 gcc_unreachable ();
2491 /* If we don't have a decl for cexp, create one. This is the
2492 friendliest fallback if the user calls __builtin_cexpi
2493 on a target without full C99 function support. */
2494 if (fn == NULL_TREE)
2496 tree fntype;
2497 const char *name = NULL;
2499 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2500 name = "cexpf";
2501 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2502 name = "cexp";
2503 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2504 name = "cexpl";
2506 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2507 fn = build_fn_decl (name, fntype);
2510 narg = fold_build2 (COMPLEX_EXPR, ctype,
2511 build_real (type, dconst0), arg);
2513 /* Make sure not to fold the cexp call again. */
2514 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2515 return expand_expr (build_call_nary (ctype, call, 1, narg),
2516 target, VOIDmode, EXPAND_NORMAL);
2519 /* Now build the proper return type. */
2520 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2521 make_tree (TREE_TYPE (arg), op2),
2522 make_tree (TREE_TYPE (arg), op1)),
2523 target, VOIDmode, EXPAND_NORMAL);
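/* As an illustration of the three paths above (a sketch, using the float
   variant): with a sincos optab, __builtin_cexpif (x) becomes a single
   two-output insn feeding COMPLEX_EXPR <cos, sin>; with TARGET_HAS_SINCOS,
   it becomes sincosf (x, &s, &c) followed by COMPLEX_EXPR <c, s>; otherwise
   it is rewritten as cexpf (__complex__ (0.0f, x)). */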
2526 /* Expand a call to one of the builtin rounding functions gcc defines
2527 as an extension (lfloor and lceil). As these are gcc extensions, we
2528 do not need to worry about setting errno to EDOM.
2529 If expanding via optab fails, lower the expression to (int)(floor(x)).
2530 EXP is the expression that is a call to the builtin function;
2531 if convenient, the result should be placed in TARGET. */
2533 static rtx
2534 expand_builtin_int_roundingfn (tree exp, rtx target)
2536 convert_optab builtin_optab;
2537 rtx op0, insns, tmp;
2538 tree fndecl = get_callee_fndecl (exp);
2539 enum built_in_function fallback_fn;
2540 tree fallback_fndecl;
2541 enum machine_mode mode;
2542 tree arg;
2544 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2545 gcc_unreachable ();
2547 arg = CALL_EXPR_ARG (exp, 0);
2549 switch (DECL_FUNCTION_CODE (fndecl))
2551 CASE_FLT_FN (BUILT_IN_LCEIL):
2552 CASE_FLT_FN (BUILT_IN_LLCEIL):
2553 builtin_optab = lceil_optab;
2554 fallback_fn = BUILT_IN_CEIL;
2555 break;
2557 CASE_FLT_FN (BUILT_IN_LFLOOR):
2558 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2559 builtin_optab = lfloor_optab;
2560 fallback_fn = BUILT_IN_FLOOR;
2561 break;
2563 default:
2564 gcc_unreachable ();
2567 /* Make a suitable register to place result in. */
2568 mode = TYPE_MODE (TREE_TYPE (exp));
2570 target = gen_reg_rtx (mode);
2572 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2573 need to expand the argument again. This way, we will not perform
2574 side-effects more than once. */
2575 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2577 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2579 start_sequence ();
2581 /* Compute into TARGET. */
2582 if (expand_sfix_optab (target, op0, builtin_optab))
2584 /* Output the entire sequence. */
2585 insns = get_insns ();
2586 end_sequence ();
2587 emit_insn (insns);
2588 return target;
2591 /* If we were unable to expand via the builtin, stop the sequence
2592 (without outputting the insns). */
2593 end_sequence ();
2595 /* Fall back to floating point rounding optab. */
2596 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2598 /* For non-C99 targets we may end up without a fallback fndecl here
2599 if the user called __builtin_lfloor directly. In this case emit
2600 a call to the floor/ceil variants nevertheless. This should result
2601 in the best user experience for targets without full C99 support. */
2602 if (fallback_fndecl == NULL_TREE)
2604 tree fntype;
2605 const char *name = NULL;
2607 switch (DECL_FUNCTION_CODE (fndecl))
2609 case BUILT_IN_LCEIL:
2610 case BUILT_IN_LLCEIL:
2611 name = "ceil";
2612 break;
2613 case BUILT_IN_LCEILF:
2614 case BUILT_IN_LLCEILF:
2615 name = "ceilf";
2616 break;
2617 case BUILT_IN_LCEILL:
2618 case BUILT_IN_LLCEILL:
2619 name = "ceill";
2620 break;
2621 case BUILT_IN_LFLOOR:
2622 case BUILT_IN_LLFLOOR:
2623 name = "floor";
2624 break;
2625 case BUILT_IN_LFLOORF:
2626 case BUILT_IN_LLFLOORF:
2627 name = "floorf";
2628 break;
2629 case BUILT_IN_LFLOORL:
2630 case BUILT_IN_LLFLOORL:
2631 name = "floorl";
2632 break;
2633 default:
2634 gcc_unreachable ();
2637 fntype = build_function_type_list (TREE_TYPE (arg),
2638 TREE_TYPE (arg), NULL_TREE);
2639 fallback_fndecl = build_fn_decl (name, fntype);
2642 exp = build_call_expr (fallback_fndecl, 1, arg);
2644 tmp = expand_normal (exp);
2646 /* Truncate the result of the floating point optab to an integer
2647 via expand_fix (). */
2648 target = gen_reg_rtx (mode);
2649 expand_fix (target, tmp, 0);
2651 return target;
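/* For example (sketch), on a target with no lfloor pattern and no C99
   fallback decl, __builtin_lfloor (d) is expanded as a call to "floor"
   followed by expand_fix, i.e. essentially (long) floor (d). */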
2654 /* Expand a call to one of the builtin math functions doing integer
2655 conversion (lrint, llrint, lround and llround).
2656 Return 0 if a normal call should be emitted rather than expanding the
2657 function in-line. EXP is the expression that is a call to the builtin
2658 function; if convenient, the result should be placed in TARGET. */
2660 static rtx
2661 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2663 convert_optab builtin_optab;
2664 rtx op0, insns;
2665 tree fndecl = get_callee_fndecl (exp);
2666 tree arg;
2667 enum machine_mode mode;
2669 /* There's no easy way to detect the case where we need to set EDOM. */
2670 if (flag_errno_math)
2671 return NULL_RTX;
2673 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2674 gcc_unreachable ();
2676 arg = CALL_EXPR_ARG (exp, 0);
2678 switch (DECL_FUNCTION_CODE (fndecl))
2680 CASE_FLT_FN (BUILT_IN_LRINT):
2681 CASE_FLT_FN (BUILT_IN_LLRINT):
2682 builtin_optab = lrint_optab; break;
2683 CASE_FLT_FN (BUILT_IN_LROUND):
2684 CASE_FLT_FN (BUILT_IN_LLROUND):
2685 builtin_optab = lround_optab; break;
2686 default:
2687 gcc_unreachable ();
2690 /* Make a suitable register to place result in. */
2691 mode = TYPE_MODE (TREE_TYPE (exp));
2693 target = gen_reg_rtx (mode);
2695 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2696 need to expand the argument again. This way, we will not perform
2697 side-effects more than once. */
2698 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2700 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2702 start_sequence ();
2704 if (expand_sfix_optab (target, op0, builtin_optab))
2706 /* Output the entire sequence. */
2707 insns = get_insns ();
2708 end_sequence ();
2709 emit_insn (insns);
2710 return target;
2713 /* If we were unable to expand via the builtin, stop the sequence
2714 (without outputting the insns) and call the library function
2715 with the stabilized argument list. */
2716 end_sequence ();
2718 target = expand_call (exp, target, target == const0_rtx);
2720 return target;
2723 /* To evaluate powi(x,n), the floating point value x raised to the
2724 constant integer exponent n, we use a hybrid algorithm that
2725 combines the "window method" with look-up tables. For an
2726 introduction to exponentiation algorithms and "addition chains",
2727 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2728 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2729 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2730 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2732 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2733 multiplications to inline before calling the system library's pow
2734 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2735 so this default never requires calling pow, powf or powl. */
2737 #ifndef POWI_MAX_MULTS
2738 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2739 #endif
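/* Worked example (assuming HOST_BITS_PER_WIDE_INT is 64): the default is
   then 2*64-2 = 126, which is exactly the 2*bits(n)-2 worst case for a
   64-bit exponent, so the default threshold never forces a fall back to
   pow, powf or powl. */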
2741 /* The size of the "optimal power tree" lookup table. All
2742 exponents less than this value are simply looked up in the
2743 powi_table below. This threshold is also used to size the
2744 cache of pseudo registers that hold intermediate results. */
2745 #define POWI_TABLE_SIZE 256
2747 /* The size, in bits of the window, used in the "window method"
2748 exponentiation algorithm. This is equivalent to a radix of
2749 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2750 #define POWI_WINDOW_SIZE 3
2752 /* The following table is an efficient representation of an
2753 "optimal power tree". For each value, i, the corresponding
2754 value, j, in the table states that an optimal evaluation
2755 sequence for calculating pow(x,i) can be found by evaluating
2756 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2757 100 integers is given in Knuth's "Seminumerical Algorithms". */
2759 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2761 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2762 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2763 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2764 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2765 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2766 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2767 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2768 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2769 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2770 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2771 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2772 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2773 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2774 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2775 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2776 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2777 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2778 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2779 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2780 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2781 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2782 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2783 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2784 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2785 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2786 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2787 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2788 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2789 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2790 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2791 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2792 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2796 /* Return the number of multiplications required to calculate
2797 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2798 subroutine of powi_cost. CACHE is an array indicating
2799 which exponents have already been calculated. */
2801 static int
2802 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2804 /* If we've already calculated this exponent, then this evaluation
2805 doesn't require any additional multiplications. */
2806 if (cache[n])
2807 return 0;
2809 cache[n] = true;
2810 return powi_lookup_cost (n - powi_table[n], cache)
2811 + powi_lookup_cost (powi_table[n], cache) + 1;
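/* Reading powi_table above: powi_table[15] == 9, so pow(x,15) is evaluated
   as pow(x,9) * pow(x,6); recursing, powi_table[9] == 6, powi_table[6] == 3,
   powi_table[3] == 2 and powi_table[2] == 1, giving the addition chain
   1, 2, 3, 6, 9, 15 and five multiplications in total, one fewer than the
   plain square-and-multiply method needs for x**15. */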
2814 /* Return the number of multiplications required to calculate
2815 powi(x,n) for an arbitrary x, given the exponent N. This
2816 function needs to be kept in sync with expand_powi below. */
2818 static int
2819 powi_cost (HOST_WIDE_INT n)
2821 bool cache[POWI_TABLE_SIZE];
2822 unsigned HOST_WIDE_INT digit;
2823 unsigned HOST_WIDE_INT val;
2824 int result;
2826 if (n == 0)
2827 return 0;
2829 /* Ignore the reciprocal when calculating the cost. */
2830 val = (n < 0) ? -n : n;
2832 /* Initialize the exponent cache. */
2833 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2834 cache[1] = true;
2836 result = 0;
2838 while (val >= POWI_TABLE_SIZE)
2840 if (val & 1)
2842 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2843 result += powi_lookup_cost (digit, cache)
2844 + POWI_WINDOW_SIZE + 1;
2845 val >>= POWI_WINDOW_SIZE;
2847 else
2849 val >>= 1;
2850 result++;
2854 return result + powi_lookup_cost (val, cache);
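/* Continuing the example, powi_cost (15) returns 5: 15 < POWI_TABLE_SIZE so
   the window loop is skipped, and powi_lookup_cost walks the same
   1, 2, 3, 6, 9, 15 chain, charging one multiplication for each exponent
   not yet in the cache. */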
2857 /* Recursive subroutine of expand_powi. This function takes the array,
2858 CACHE, of already calculated exponents and an exponent N and returns
2859 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2861 static rtx
2862 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2864 unsigned HOST_WIDE_INT digit;
2865 rtx target, result;
2866 rtx op0, op1;
2868 if (n < POWI_TABLE_SIZE)
2870 if (cache[n])
2871 return cache[n];
2873 target = gen_reg_rtx (mode);
2874 cache[n] = target;
2876 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2877 op1 = expand_powi_1 (mode, powi_table[n], cache);
2879 else if (n & 1)
2881 target = gen_reg_rtx (mode);
2882 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2883 op0 = expand_powi_1 (mode, n - digit, cache);
2884 op1 = expand_powi_1 (mode, digit, cache);
2886 else
2888 target = gen_reg_rtx (mode);
2889 op0 = expand_powi_1 (mode, n >> 1, cache);
2890 op1 = op0;
2893 result = expand_mult (mode, op0, op1, target, 0);
2894 if (result != target)
2895 emit_move_insn (target, result);
2896 return target;
2899 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2900 floating point operand in mode MODE, and N is the exponent. This
2901 function needs to be kept in sync with powi_cost above. */
2903 static rtx
2904 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2906 unsigned HOST_WIDE_INT val;
2907 rtx cache[POWI_TABLE_SIZE];
2908 rtx result;
2910 if (n == 0)
2911 return CONST1_RTX (mode);
2913 val = (n < 0) ? -n : n;
2915 memset (cache, 0, sizeof (cache));
2916 cache[1] = x;
2918 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2920 /* If the original exponent was negative, reciprocate the result. */
2921 if (n < 0)
2922 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2923 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2925 return result;
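/* As a sketch of the generated code, expand_powi (x, mode, -5) emits
   t2 = x*x, t3 = x*t2 and t5 = t2*t3 via expand_powi_1, and then, because
   the exponent is negative, a final division result = 1/t5. */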
2928 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2929 a normal call should be emitted rather than expanding the function
2930 in-line. EXP is the expression that is a call to the builtin
2931 function; if convenient, the result should be placed in TARGET. */
2933 static rtx
2934 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2936 tree arg0, arg1;
2937 tree fn, narg0;
2938 tree type = TREE_TYPE (exp);
2939 REAL_VALUE_TYPE cint, c, c2;
2940 HOST_WIDE_INT n;
2941 rtx op, op2;
2942 enum machine_mode mode = TYPE_MODE (type);
2944 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2945 return NULL_RTX;
2947 arg0 = CALL_EXPR_ARG (exp, 0);
2948 arg1 = CALL_EXPR_ARG (exp, 1);
2950 if (TREE_CODE (arg1) != REAL_CST
2951 || TREE_OVERFLOW (arg1))
2952 return expand_builtin_mathfn_2 (exp, target, subtarget);
2954 /* Handle constant exponents. */
2956 /* For integer valued exponents we can expand to an optimal multiplication
2957 sequence using expand_powi. */
2958 c = TREE_REAL_CST (arg1);
2959 n = real_to_integer (&c);
2960 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2961 if (real_identical (&c, &cint)
2962 && ((n >= -1 && n <= 2)
2963 || (flag_unsafe_math_optimizations
2964 && optimize_insn_for_speed_p ()
2965 && powi_cost (n) <= POWI_MAX_MULTS)))
2967 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2968 if (n != 1)
2970 op = force_reg (mode, op);
2971 op = expand_powi (op, mode, n);
2973 return op;
2976 narg0 = builtin_save_expr (arg0);
2978 /* If the exponent is not integer valued, check if it is half of an integer.
2979 In this case we can expand to sqrt (x) * x**(n/2). */
2980 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2981 if (fn != NULL_TREE)
2983 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2984 n = real_to_integer (&c2);
2985 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2986 if (real_identical (&c2, &cint)
2987 && ((flag_unsafe_math_optimizations
2988 && optimize_insn_for_speed_p ()
2989 && powi_cost (n/2) <= POWI_MAX_MULTS)
2990 || n == 1))
2992 tree call_expr = build_call_expr (fn, 1, narg0);
2993 /* Use expand_expr in case the newly built call expression
2994 was folded to a non-call. */
2995 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2996 if (n != 1)
2998 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2999 op2 = force_reg (mode, op2);
3000 op2 = expand_powi (op2, mode, abs (n / 2));
3001 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3002 0, OPTAB_LIB_WIDEN);
3003 /* If the original exponent was negative, reciprocate the
3004 result. */
3005 if (n < 0)
3006 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3007 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3009 return op;
3013 /* Check whether the exponent is a third of an integer. In this case
3014 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3015 different from pow (x, 1./3.) due to rounding and to its behavior
3016 with negative x, we need to constrain this transformation to
3017 unsafe math and either positive x or finite math. */
3018 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3019 if (fn != NULL_TREE
3020 && flag_unsafe_math_optimizations
3021 && (tree_expr_nonnegative_p (arg0)
3022 || !HONOR_NANS (mode)))
3024 REAL_VALUE_TYPE dconst3;
3025 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3026 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3027 real_round (&c2, mode, &c2);
3028 n = real_to_integer (&c2);
3029 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3030 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3031 real_convert (&c2, mode, &c2);
3032 if (real_identical (&c2, &c)
3033 && ((optimize_insn_for_speed_p ()
3034 && powi_cost (n/3) <= POWI_MAX_MULTS)
3035 || n == 1))
3037 tree call_expr = build_call_expr (fn, 1, narg0);
3038 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3039 if (abs (n) % 3 == 2)
3040 op = expand_simple_binop (mode, MULT, op, op, op,
3041 0, OPTAB_LIB_WIDEN);
3042 if (n != 1)
3044 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3045 op2 = force_reg (mode, op2);
3046 op2 = expand_powi (op2, mode, abs (n / 3));
3047 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3048 0, OPTAB_LIB_WIDEN);
3049 /* If the original exponent was negative, reciprocate the
3050 result. */
3051 if (n < 0)
3052 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3053 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3055 return op;
3059 /* Fall back to optab expansion. */
3060 return expand_builtin_mathfn_2 (exp, target, subtarget);
3063 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3064 a normal call should be emitted rather than expanding the function
3065 in-line. EXP is the expression that is a call to the builtin
3066 function; if convenient, the result should be placed in TARGET. */
3068 static rtx
3069 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3071 tree arg0, arg1;
3072 rtx op0, op1;
3073 enum machine_mode mode;
3074 enum machine_mode mode2;
3076 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3077 return NULL_RTX;
3079 arg0 = CALL_EXPR_ARG (exp, 0);
3080 arg1 = CALL_EXPR_ARG (exp, 1);
3081 mode = TYPE_MODE (TREE_TYPE (exp));
3083 /* Handle constant power. */
3085 if (TREE_CODE (arg1) == INTEGER_CST
3086 && !TREE_OVERFLOW (arg1))
3088 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3090 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3091 Otherwise, check the number of multiplications required. */
3092 if ((TREE_INT_CST_HIGH (arg1) == 0
3093 || TREE_INT_CST_HIGH (arg1) == -1)
3094 && ((n >= -1 && n <= 2)
3095 || (optimize_insn_for_speed_p ()
3096 && powi_cost (n) <= POWI_MAX_MULTS)))
3098 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3099 op0 = force_reg (mode, op0);
3100 return expand_powi (op0, mode, n);
3104 /* Emit a libcall to libgcc. */
3106 /* Mode of the 2nd argument must match that of an int. */
3107 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3109 if (target == NULL_RTX)
3110 target = gen_reg_rtx (mode);
3112 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3113 if (GET_MODE (op0) != mode)
3114 op0 = convert_to_mode (mode, op0, 0);
3115 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3116 if (GET_MODE (op1) != mode2)
3117 op1 = convert_to_mode (mode2, op1, 0);
3119 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3120 target, LCT_CONST, mode, 2,
3121 op0, mode, op1, mode2);
3123 return target;
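/* For instance (sketch), __builtin_powi (x, 3) compiled for speed is
   expanded inline by expand_powi as t2 = x*x and t3 = x*t2, two
   multiplications, while a non-constant exponent always produces the
   libcall obtained from optab_libfunc (powi_optab, mode). */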
3126 /* Expand expression EXP which is a call to the strlen builtin. Return
3127 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3128 try to get the result in TARGET, if convenient. */
3130 static rtx
3131 expand_builtin_strlen (tree exp, rtx target,
3132 enum machine_mode target_mode)
3134 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3135 return NULL_RTX;
3136 else
3138 rtx pat;
3139 tree len;
3140 tree src = CALL_EXPR_ARG (exp, 0);
3141 rtx result, src_reg, char_rtx, before_strlen;
3142 enum machine_mode insn_mode = target_mode, char_mode;
3143 enum insn_code icode = CODE_FOR_nothing;
3144 int align;
3146 /* If the length can be computed at compile-time, return it. */
3147 len = c_strlen (src, 0);
3148 if (len)
3149 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3151 /* If the length can be computed at compile-time and is a constant
3152 integer, but there are side-effects in src, evaluate
3153 src for side-effects, then return len.
3154 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3155 can be optimized into: i++; x = 3; */
3156 len = c_strlen (src, 1);
3157 if (len && TREE_CODE (len) == INTEGER_CST)
3159 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3160 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3163 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3165 /* If SRC is not a pointer type, don't do this operation inline. */
3166 if (align == 0)
3167 return NULL_RTX;
3169 /* Bail out if we can't compute strlen in the right mode. */
3170 while (insn_mode != VOIDmode)
3172 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3173 if (icode != CODE_FOR_nothing)
3174 break;
3176 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3178 if (insn_mode == VOIDmode)
3179 return NULL_RTX;
3181 /* Make a place to write the result of the instruction. */
3182 result = target;
3183 if (! (result != 0
3184 && REG_P (result)
3185 && GET_MODE (result) == insn_mode
3186 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3187 result = gen_reg_rtx (insn_mode);
3189 /* Make a place to hold the source address. We will not expand
3190 the actual source until we are sure that the expansion will
3191 not fail -- there are trees that cannot be expanded twice. */
3192 src_reg = gen_reg_rtx (Pmode);
3194 /* Mark the beginning of the strlen sequence so we can emit the
3195 source operand later. */
3196 before_strlen = get_last_insn ();
3198 char_rtx = const0_rtx;
3199 char_mode = insn_data[(int) icode].operand[2].mode;
3200 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3201 char_mode))
3202 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3204 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3205 char_rtx, GEN_INT (align));
3206 if (! pat)
3207 return NULL_RTX;
3208 emit_insn (pat);
3210 /* Now that we are assured of success, expand the source. */
3211 start_sequence ();
3212 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3213 if (pat != src_reg)
3214 emit_move_insn (src_reg, pat);
3215 pat = get_insns ();
3216 end_sequence ();
3218 if (before_strlen)
3219 emit_insn_after (pat, before_strlen);
3220 else
3221 emit_insn_before (pat, get_insns ());
3223 /* Return the value in the proper mode for this function. */
3224 if (GET_MODE (result) == target_mode)
3225 target = result;
3226 else if (target != 0)
3227 convert_move (target, result, 0);
3228 else
3229 target = convert_to_mode (target_mode, result, 0);
3231 return target;
3235 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed; the
3236 caller should emit a normal call. Otherwise try to get the result
3237 in TARGET, if convenient (and in mode MODE if that's convenient). */
3239 static rtx
3240 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3242 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3244 tree type = TREE_TYPE (exp);
3245 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3246 CALL_EXPR_ARG (exp, 1), type);
3247 if (result)
3248 return expand_expr (result, target, mode, EXPAND_NORMAL);
3250 return NULL_RTX;
3253 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3254 caller should emit a normal call. Otherwise try to get the result
3255 in TARGET, if convenient (and in mode MODE if that's convenient). */
3257 static rtx
3258 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3260 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3262 tree type = TREE_TYPE (exp);
3263 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3264 CALL_EXPR_ARG (exp, 1), type);
3265 if (result)
3266 return expand_expr (result, target, mode, EXPAND_NORMAL);
3268 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3270 return NULL_RTX;
3273 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3274 caller should emit a normal call. Otherwise try to get the result
3275 in TARGET, if convenient (and in mode MODE if that's convenient). */
3277 static rtx
3278 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3280 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3282 tree type = TREE_TYPE (exp);
3283 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3284 CALL_EXPR_ARG (exp, 1), type);
3285 if (result)
3286 return expand_expr (result, target, mode, EXPAND_NORMAL);
3288 return NULL_RTX;
3291 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3292 caller should emit a normal call. Otherwise try to get the result
3293 in TARGET, if convenient (and in mode MODE if that's convenient). */
3295 static rtx
3296 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3298 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3300 tree type = TREE_TYPE (exp);
3301 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3302 CALL_EXPR_ARG (exp, 1), type);
3303 if (result)
3304 return expand_expr (result, target, mode, EXPAND_NORMAL);
3306 return NULL_RTX;
3309 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3310 bytes from constant string DATA + OFFSET and return it as target
3311 constant. */
3313 static rtx
3314 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3315 enum machine_mode mode)
3317 const char *str = (const char *) data;
3319 gcc_assert (offset >= 0
3320 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3321 <= strlen (str) + 1));
3323 return c_readstr (str + offset, mode);
3326 /* Expand a call EXP to the memcpy builtin.
3327 Return NULL_RTX if we failed; the caller should emit a normal call,
3328 otherwise try to get the result in TARGET, if convenient (and in
3329 mode MODE if that's convenient). */
3331 static rtx
3332 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3334 tree fndecl = get_callee_fndecl (exp);
3336 if (!validate_arglist (exp,
3337 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3338 return NULL_RTX;
3339 else
3341 tree dest = CALL_EXPR_ARG (exp, 0);
3342 tree src = CALL_EXPR_ARG (exp, 1);
3343 tree len = CALL_EXPR_ARG (exp, 2);
3344 const char *src_str;
3345 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3346 unsigned int dest_align
3347 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3348 rtx dest_mem, src_mem, dest_addr, len_rtx;
3349 tree result = fold_builtin_memory_op (dest, src, len,
3350 TREE_TYPE (TREE_TYPE (fndecl)),
3351 false, /*endp=*/0);
3352 HOST_WIDE_INT expected_size = -1;
3353 unsigned int expected_align = 0;
3354 tree_ann_common_t ann;
3356 if (result)
3358 while (TREE_CODE (result) == COMPOUND_EXPR)
3360 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3361 EXPAND_NORMAL);
3362 result = TREE_OPERAND (result, 1);
3364 return expand_expr (result, target, mode, EXPAND_NORMAL);
3367 /* If DEST is not a pointer type, call the normal function. */
3368 if (dest_align == 0)
3369 return NULL_RTX;
3371 /* If SRC is not a pointer type, don't do this
3372 operation in-line. */
3373 if (src_align == 0)
3374 return NULL_RTX;
3376 ann = tree_common_ann (exp);
3377 if (ann)
3378 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3380 if (expected_align < dest_align)
3381 expected_align = dest_align;
3382 dest_mem = get_memory_rtx (dest, len);
3383 set_mem_align (dest_mem, dest_align);
3384 len_rtx = expand_normal (len);
3385 src_str = c_getstr (src);
3387 /* If SRC is a string constant and block move would be done
3388 by pieces, we can avoid loading the string from memory
3389 and only store the computed constants. */
3390 if (src_str
3391 && GET_CODE (len_rtx) == CONST_INT
3392 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3393 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3394 CONST_CAST (char *, src_str),
3395 dest_align, false))
3397 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3398 builtin_memcpy_read_str,
3399 CONST_CAST (char *, src_str),
3400 dest_align, false, 0);
3401 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3402 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3403 return dest_mem;
3406 src_mem = get_memory_rtx (src, len);
3407 set_mem_align (src_mem, src_align);
3409 /* Copy word part most expediently. */
3410 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3411 CALL_EXPR_TAILCALL (exp)
3412 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3413 expected_align, expected_size);
3415 if (dest_addr == 0)
3417 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3418 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3420 return dest_addr;
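/* Editorial sketch, not part of builtins.c: the kind of source-level call the
   expander above handles without a libcall.  With a string-constant SRC and a
   small constant LEN the bytes are emitted directly via store_by_pieces;
   otherwise emit_block_move_hints or a plain memcpy call is used.  The helper
   name below is hypothetical. */
#include <string.h>

static void
example_memcpy_expansion (char *buf, const char *p, size_t n)
{
  memcpy (buf, "abc", 4);   /* constant source and length: store_by_pieces */
  memcpy (buf, p, n);       /* variable length: block move or library call */
}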
3424 /* Expand a call EXP to the mempcpy builtin.
3425 Return NULL_RTX if we failed; the caller should emit a normal call,
3426 otherwise try to get the result in TARGET, if convenient (and in
3427 mode MODE if that's convenient). If ENDP is 0 return the
3428 destination pointer, if ENDP is 1 return the end pointer a la
3429 mempcpy, and if ENDP is 2 return the end pointer minus one a la
3430 stpcpy. */
3432 static rtx
3433 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3435 if (!validate_arglist (exp,
3436 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3437 return NULL_RTX;
3438 else
3440 tree dest = CALL_EXPR_ARG (exp, 0);
3441 tree src = CALL_EXPR_ARG (exp, 1);
3442 tree len = CALL_EXPR_ARG (exp, 2);
3443 return expand_builtin_mempcpy_args (dest, src, len,
3444 TREE_TYPE (exp),
3445 target, mode, /*endp=*/ 1);
3449 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3450 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3451 so that this can also be called without constructing an actual CALL_EXPR.
3452 TYPE is the return type of the call. The other arguments and return value
3453 are the same as for expand_builtin_mempcpy. */
3455 static rtx
3456 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3457 rtx target, enum machine_mode mode, int endp)
3459 /* If return value is ignored, transform mempcpy into memcpy. */
3460 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3462 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3463 tree result = build_call_expr (fn, 3, dest, src, len);
3465 while (TREE_CODE (result) == COMPOUND_EXPR)
3467 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3468 EXPAND_NORMAL);
3469 result = TREE_OPERAND (result, 1);
3471 return expand_expr (result, target, mode, EXPAND_NORMAL);
3473 else
3475 const char *src_str;
3476 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3477 unsigned int dest_align
3478 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3479 rtx dest_mem, src_mem, len_rtx;
3480 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3482 if (result)
3484 while (TREE_CODE (result) == COMPOUND_EXPR)
3486 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3487 EXPAND_NORMAL);
3488 result = TREE_OPERAND (result, 1);
3490 return expand_expr (result, target, mode, EXPAND_NORMAL);
3493 /* If either SRC or DEST is not a pointer type, don't do this
3494 operation in-line. */
3495 if (dest_align == 0 || src_align == 0)
3496 return NULL_RTX;
3498 /* If LEN is not constant, call the normal function. */
3499 if (! host_integerp (len, 1))
3500 return NULL_RTX;
3502 len_rtx = expand_normal (len);
3503 src_str = c_getstr (src);
3505 /* If SRC is a string constant and block move would be done
3506 by pieces, we can avoid loading the string from memory
3507 and only store the computed constants. */
3508 if (src_str
3509 && GET_CODE (len_rtx) == CONST_INT
3510 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3511 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3512 CONST_CAST (char *, src_str),
3513 dest_align, false))
3515 dest_mem = get_memory_rtx (dest, len);
3516 set_mem_align (dest_mem, dest_align);
3517 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3518 builtin_memcpy_read_str,
3519 CONST_CAST (char *, src_str),
3520 dest_align, false, endp);
3521 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3522 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3523 return dest_mem;
3526 if (GET_CODE (len_rtx) == CONST_INT
3527 && can_move_by_pieces (INTVAL (len_rtx),
3528 MIN (dest_align, src_align)))
3530 dest_mem = get_memory_rtx (dest, len);
3531 set_mem_align (dest_mem, dest_align);
3532 src_mem = get_memory_rtx (src, len);
3533 set_mem_align (src_mem, src_align);
3534 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3535 MIN (dest_align, src_align), endp);
3536 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3537 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3538 return dest_mem;
3541 return NULL_RTX;
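/* Editorial sketch, not part of builtins.c: the ENDP convention used by the
   mempcpy/stpcpy expanders, spelled out at the source level.  ENDP == 0
   returns DEST (memcpy), ENDP == 1 returns DEST + LEN (mempcpy), and
   ENDP == 2 returns DEST + LEN - 1 (stpcpy, a pointer to the copied NUL).
   The helper name is hypothetical. */
#include <string.h>

static char *
example_endp_convention (char *dest, const char *src, size_t len, int endp)
{
  memcpy (dest, src, len);
  if (endp == 0)
    return dest;             /* memcpy-style result */
  if (endp == 1)
    return dest + len;       /* mempcpy-style result */
  return dest + len - 1;     /* stpcpy-style result */
}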
3545 /* Expand expression EXP, which is a call to the memmove builtin. Return
3546 NULL_RTX if we failed; the caller should emit a normal call. */
3548 static rtx
3549 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3551 if (!validate_arglist (exp,
3552 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3553 return NULL_RTX;
3554 else
3556 tree dest = CALL_EXPR_ARG (exp, 0);
3557 tree src = CALL_EXPR_ARG (exp, 1);
3558 tree len = CALL_EXPR_ARG (exp, 2);
3559 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3560 target, mode, ignore);
3564 /* Helper function to do the actual work for expand_builtin_memmove. The
3565 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3566 so that this can also be called without constructing an actual CALL_EXPR.
3567 TYPE is the return type of the call. The other arguments and return value
3568 are the same as for expand_builtin_memmove. */
3570 static rtx
3571 expand_builtin_memmove_args (tree dest, tree src, tree len,
3572 tree type, rtx target, enum machine_mode mode,
3573 int ignore)
3575 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3577 if (result)
3579 STRIP_TYPE_NOPS (result);
3580 while (TREE_CODE (result) == COMPOUND_EXPR)
3582 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3583 EXPAND_NORMAL);
3584 result = TREE_OPERAND (result, 1);
3586 return expand_expr (result, target, mode, EXPAND_NORMAL);
3589 /* Otherwise, call the normal function. */
3590 return NULL_RTX;
3593 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3594 NULL_RTX if we failed; the caller should emit a normal call. */
3596 static rtx
3597 expand_builtin_bcopy (tree exp, int ignore)
3599 tree type = TREE_TYPE (exp);
3600 tree src, dest, size;
3602 if (!validate_arglist (exp,
3603 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3604 return NULL_RTX;
3606 src = CALL_EXPR_ARG (exp, 0);
3607 dest = CALL_EXPR_ARG (exp, 1);
3608 size = CALL_EXPR_ARG (exp, 2);
3610 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3611 This is done this way so that if it isn't expanded inline, we fall
3612 back to calling bcopy instead of memmove. */
3613 return expand_builtin_memmove_args (dest, src,
3614 fold_convert (sizetype, size),
3615 type, const0_rtx, VOIDmode,
3616 ignore);
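/* Editorial sketch, not part of builtins.c: the bcopy-to-memmove rewrite
   performed above is only an argument swap plus a size_t conversion.  The
   helper name is hypothetical. */
#include <string.h>

static void
example_bcopy_rewrite (const void *src, void *dest, unsigned int n)
{
  /* bcopy (src, dest, n) is expanded as if it were written: */
  memmove (dest, src, (size_t) n);
}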
3619 #ifndef HAVE_movstr
3620 # define HAVE_movstr 0
3621 # define CODE_FOR_movstr CODE_FOR_nothing
3622 #endif
3624 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3625 we failed; the caller should emit a normal call, otherwise try to
3626 get the result in TARGET, if convenient. If ENDP is 0 return the
3627 destination pointer, if ENDP is 1 return the end pointer a la
3628 mempcpy, and if ENDP is 2 return the end pointer minus one a la
3629 stpcpy. */
3631 static rtx
3632 expand_movstr (tree dest, tree src, rtx target, int endp)
3634 rtx end;
3635 rtx dest_mem;
3636 rtx src_mem;
3637 rtx insn;
3638 const struct insn_data * data;
3640 if (!HAVE_movstr)
3641 return NULL_RTX;
3643 dest_mem = get_memory_rtx (dest, NULL);
3644 src_mem = get_memory_rtx (src, NULL);
3645 if (!endp)
3647 target = force_reg (Pmode, XEXP (dest_mem, 0));
3648 dest_mem = replace_equiv_address (dest_mem, target);
3649 end = gen_reg_rtx (Pmode);
3651 else
3653 if (target == 0 || target == const0_rtx)
3655 end = gen_reg_rtx (Pmode);
3656 if (target == 0)
3657 target = end;
3659 else
3660 end = target;
3663 data = insn_data + CODE_FOR_movstr;
3665 if (data->operand[0].mode != VOIDmode)
3666 end = gen_lowpart (data->operand[0].mode, end);
3668 insn = data->genfun (end, dest_mem, src_mem);
3670 gcc_assert (insn);
3672 emit_insn (insn);
3674 /* movstr is supposed to set end to the address of the NUL
3675 terminator. If the caller requested a mempcpy-like return value,
3676 adjust it. */
3677 if (endp == 1 && target != const0_rtx)
3679 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3680 emit_move_insn (target, force_operand (tem, NULL_RTX));
3683 return target;
3686 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3687 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3688 try to get the result in TARGET, if convenient (and in mode MODE if that's
3689 convenient). */
3691 static rtx
3692 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3694 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3696 tree dest = CALL_EXPR_ARG (exp, 0);
3697 tree src = CALL_EXPR_ARG (exp, 1);
3698 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3700 return NULL_RTX;
3703 /* Helper function to do the actual work for expand_builtin_strcpy. The
3704 arguments to the builtin_strcpy call DEST and SRC are broken out
3705 so that this can also be called without constructing an actual CALL_EXPR.
3706 The other arguments and return value are the same as for
3707 expand_builtin_strcpy. */
3709 static rtx
3710 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3711 rtx target, enum machine_mode mode)
3713 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3714 if (result)
3715 return expand_expr (result, target, mode, EXPAND_NORMAL);
3716 return expand_movstr (dest, src, target, /*endp=*/0);
3720 /* Expand a call EXP to the stpcpy builtin.
3721 Return NULL_RTX if we failed; the caller should emit a normal call,
3722 otherwise try to get the result in TARGET, if convenient (and in
3723 mode MODE if that's convenient). */
3725 static rtx
3726 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3728 tree dst, src;
3730 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3731 return NULL_RTX;
3733 dst = CALL_EXPR_ARG (exp, 0);
3734 src = CALL_EXPR_ARG (exp, 1);
3736 /* If return value is ignored, transform stpcpy into strcpy. */
3737 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3739 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3740 tree result = build_call_expr (fn, 2, dst, src);
3742 STRIP_NOPS (result);
3743 while (TREE_CODE (result) == COMPOUND_EXPR)
3745 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3746 EXPAND_NORMAL);
3747 result = TREE_OPERAND (result, 1);
3749 return expand_expr (result, target, mode, EXPAND_NORMAL);
3751 else
3753 tree len, lenp1;
3754 rtx ret;
3756 /* Ensure we get an actual string whose length can be evaluated at
3757 compile-time, not an expression containing a string. This is
3758 because the latter will potentially produce pessimized code
3759 when used to produce the return value. */
3760 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3761 return expand_movstr (dst, src, target, /*endp=*/2);
3763 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3764 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3765 target, mode, /*endp=*/2);
3767 if (ret)
3768 return ret;
3770 if (TREE_CODE (len) == INTEGER_CST)
3772 rtx len_rtx = expand_normal (len);
3774 if (GET_CODE (len_rtx) == CONST_INT)
3776 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3777 dst, src, target, mode);
3779 if (ret)
3781 if (! target)
3783 if (mode != VOIDmode)
3784 target = gen_reg_rtx (mode);
3785 else
3786 target = gen_reg_rtx (GET_MODE (ret));
3788 if (GET_MODE (target) != GET_MODE (ret))
3789 ret = gen_lowpart (GET_MODE (target), ret);
3791 ret = plus_constant (ret, INTVAL (len_rtx));
3792 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3793 gcc_assert (ret);
3795 return target;
3800 return expand_movstr (dst, src, target, /*endp=*/2);
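/* Editorial sketch, not part of builtins.c: when SRC is a string constant
   with a compile-time length, stpcpy above is expanded as a mempcpy of
   strlen (SRC) + 1 bytes with ENDP == 2, so the value returned is a pointer
   to the copied NUL terminator.  The helper name is hypothetical. */
#include <string.h>

static char *
example_stpcpy_expansion (char *dst, const char *src)
{
  size_t lenp1 = strlen (src) + 1;   /* known statically in the builtin case */
  memcpy (dst, src, lenp1);
  return dst + lenp1 - 1;            /* what stpcpy (dst, src) returns */
}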
3804 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3805 bytes from constant string DATA + OFFSET and return it as target
3806 constant. */
3808 static rtx
3809 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3810 enum machine_mode mode)
3812 const char *str = (const char *) data;
3814 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3815 return const0_rtx;
3817 return c_readstr (str + offset, mode);
3820 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3821 NULL_RTX if we failed; the caller should emit a normal call. */
3823 static rtx
3824 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3826 tree fndecl = get_callee_fndecl (exp);
3828 if (validate_arglist (exp,
3829 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3831 tree dest = CALL_EXPR_ARG (exp, 0);
3832 tree src = CALL_EXPR_ARG (exp, 1);
3833 tree len = CALL_EXPR_ARG (exp, 2);
3834 tree slen = c_strlen (src, 1);
3835 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3837 if (result)
3839 while (TREE_CODE (result) == COMPOUND_EXPR)
3841 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3842 EXPAND_NORMAL);
3843 result = TREE_OPERAND (result, 1);
3845 return expand_expr (result, target, mode, EXPAND_NORMAL);
3848 /* We must be passed a constant len and src parameter. */
3849 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3850 return NULL_RTX;
3852 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3854 /* We're required to pad with trailing zeros if the requested
3855 len is greater than strlen(s2)+1. In that case try to
3856 use store_by_pieces; if it fails, punt. */
3857 if (tree_int_cst_lt (slen, len))
3859 unsigned int dest_align
3860 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3861 const char *p = c_getstr (src);
3862 rtx dest_mem;
3864 if (!p || dest_align == 0 || !host_integerp (len, 1)
3865 || !can_store_by_pieces (tree_low_cst (len, 1),
3866 builtin_strncpy_read_str,
3867 CONST_CAST (char *, p),
3868 dest_align, false))
3869 return NULL_RTX;
3871 dest_mem = get_memory_rtx (dest, len);
3872 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3873 builtin_strncpy_read_str,
3874 CONST_CAST (char *, p), dest_align, false, 0);
3875 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3876 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3877 return dest_mem;
3880 return NULL_RTX;
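/* Editorial sketch, not part of builtins.c: strncpy must pad DEST with NUL
   bytes whenever LEN exceeds strlen (SRC) + 1, which is why
   builtin_strncpy_read_str above returns zero bytes once OFFSET passes the
   end of the source string.  The helper name is hypothetical. */
#include <string.h>

static void
example_strncpy_padding (char buf[8])
{
  /* Stores 'h', 'i', '\0' and then five more '\0' padding bytes.  */
  strncpy (buf, "hi", 8);
}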
3883 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3884 bytes from constant string DATA + OFFSET and return it as target
3885 constant. */
3887 static rtx
3888 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3889 enum machine_mode mode)
3891 const char *c = (const char *) data;
3892 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3894 memset (p, *c, GET_MODE_SIZE (mode));
3896 return c_readstr (p, mode);
3899 /* Callback routine for store_by_pieces. Return the RTL of a register
3900 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3901 char value given in the RTL register data. For example, if mode is
3902 4 bytes wide, return the RTL for 0x01010101*data. */
3904 static rtx
3905 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3906 enum machine_mode mode)
3908 rtx target, coeff;
3909 size_t size;
3910 char *p;
3912 size = GET_MODE_SIZE (mode);
3913 if (size == 1)
3914 return (rtx) data;
3916 p = XALLOCAVEC (char, size);
3917 memset (p, 1, size);
3918 coeff = c_readstr (p, mode);
3920 target = convert_to_mode (mode, (rtx) data, 1);
3921 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3922 return force_reg (mode, target);
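/* Editorial sketch, not part of builtins.c: builtin_memset_gen_str relies on
   the classic replication trick shown below -- multiplying an unsigned byte
   by a word of 0x01 bytes copies that byte into every byte of the word.  The
   helper name is hypothetical. */
#include <stdint.h>

static uint32_t
example_replicate_byte (uint8_t c)
{
  return (uint32_t) c * 0x01010101u;   /* e.g. 0xab becomes 0xabababab */
}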
3925 /* Expand expression EXP, which is a call to the memset builtin. Return
3926 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3927 try to get the result in TARGET, if convenient (and in mode MODE if that's
3928 convenient). */
3930 static rtx
3931 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3933 if (!validate_arglist (exp,
3934 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3935 return NULL_RTX;
3936 else
3938 tree dest = CALL_EXPR_ARG (exp, 0);
3939 tree val = CALL_EXPR_ARG (exp, 1);
3940 tree len = CALL_EXPR_ARG (exp, 2);
3941 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3945 /* Helper function to do the actual work for expand_builtin_memset. The
3946 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3947 so that this can also be called without constructing an actual CALL_EXPR.
3948 The other arguments and return value are the same as for
3949 expand_builtin_memset. */
3951 static rtx
3952 expand_builtin_memset_args (tree dest, tree val, tree len,
3953 rtx target, enum machine_mode mode, tree orig_exp)
3955 tree fndecl, fn;
3956 enum built_in_function fcode;
3957 char c;
3958 unsigned int dest_align;
3959 rtx dest_mem, dest_addr, len_rtx;
3960 HOST_WIDE_INT expected_size = -1;
3961 unsigned int expected_align = 0;
3962 tree_ann_common_t ann;
3964 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3966 /* If DEST is not a pointer type, don't do this operation in-line. */
3967 if (dest_align == 0)
3968 return NULL_RTX;
3970 ann = tree_common_ann (orig_exp);
3971 if (ann)
3972 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3974 if (expected_align < dest_align)
3975 expected_align = dest_align;
3977 /* If the LEN parameter is zero, return DEST. */
3978 if (integer_zerop (len))
3980 /* Evaluate and ignore VAL in case it has side-effects. */
3981 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3982 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3985 /* Stabilize the arguments in case we fail. */
3986 dest = builtin_save_expr (dest);
3987 val = builtin_save_expr (val);
3988 len = builtin_save_expr (len);
3990 len_rtx = expand_normal (len);
3991 dest_mem = get_memory_rtx (dest, len);
3993 if (TREE_CODE (val) != INTEGER_CST)
3995 rtx val_rtx;
3997 val_rtx = expand_normal (val);
3998 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3999 val_rtx, 0);
4001 /* Assume that we can memset by pieces if we can store
4002 * the coefficients by pieces (in the required modes).
4003 * We can't pass builtin_memset_gen_str as that emits RTL. */
4004 c = 1;
4005 if (host_integerp (len, 1)
4006 && can_store_by_pieces (tree_low_cst (len, 1),
4007 builtin_memset_read_str, &c, dest_align,
4008 true))
4010 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4011 val_rtx);
4012 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4013 builtin_memset_gen_str, val_rtx, dest_align,
4014 true, 0);
4016 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4017 dest_align, expected_align,
4018 expected_size))
4019 goto do_libcall;
4021 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4022 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4023 return dest_mem;
4026 if (target_char_cast (val, &c))
4027 goto do_libcall;
4029 if (c)
4031 if (host_integerp (len, 1)
4032 && can_store_by_pieces (tree_low_cst (len, 1),
4033 builtin_memset_read_str, &c, dest_align,
4034 true))
4035 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4036 builtin_memset_read_str, &c, dest_align, true, 0);
4037 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4038 dest_align, expected_align,
4039 expected_size))
4040 goto do_libcall;
4042 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4043 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4044 return dest_mem;
4047 set_mem_align (dest_mem, dest_align);
4048 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4049 CALL_EXPR_TAILCALL (orig_exp)
4050 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4051 expected_align, expected_size);
4053 if (dest_addr == 0)
4055 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4056 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4059 return dest_addr;
4061 do_libcall:
4062 fndecl = get_callee_fndecl (orig_exp);
4063 fcode = DECL_FUNCTION_CODE (fndecl);
4064 if (fcode == BUILT_IN_MEMSET)
4065 fn = build_call_expr (fndecl, 3, dest, val, len);
4066 else if (fcode == BUILT_IN_BZERO)
4067 fn = build_call_expr (fndecl, 2, dest, len);
4068 else
4069 gcc_unreachable ();
4070 if (TREE_CODE (fn) == CALL_EXPR)
4071 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4072 return expand_call (fn, target, target == const0_rtx);
4075 /* Expand expression EXP, which is a call to the bzero builtin. Return
4076 NULL_RTX if we failed; the caller should emit a normal call. */
4078 static rtx
4079 expand_builtin_bzero (tree exp)
4081 tree dest, size;
4083 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4084 return NULL_RTX;
4086 dest = CALL_EXPR_ARG (exp, 0);
4087 size = CALL_EXPR_ARG (exp, 1);
4089 /* New argument list transforming bzero(ptr x, int y) to
4090 memset(ptr x, int 0, size_t y). This is done this way
4091 so that if it isn't expanded inline, we fall back to
4092 calling bzero instead of memset. */
4094 return expand_builtin_memset_args (dest, integer_zero_node,
4095 fold_convert (sizetype, size),
4096 const0_rtx, VOIDmode, exp);
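/* Editorial sketch, not part of builtins.c: the bzero expansion above simply
   reuses the memset machinery with a constant zero value.  The helper name is
   hypothetical. */
#include <string.h>

static void
example_bzero_rewrite (void *p, size_t n)
{
  /* bzero (p, n) is expanded as if it were written: */
  memset (p, 0, n);
}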
4099 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed; the
4100 caller should emit a normal call, otherwise try to get the result
4101 in TARGET, if convenient (and in mode MODE if that's convenient). */
4103 static rtx
4104 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4106 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4107 INTEGER_TYPE, VOID_TYPE))
4109 tree type = TREE_TYPE (exp);
4110 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4111 CALL_EXPR_ARG (exp, 1),
4112 CALL_EXPR_ARG (exp, 2), type);
4113 if (result)
4114 return expand_expr (result, target, mode, EXPAND_NORMAL);
4116 return NULL_RTX;
4119 /* Expand expression EXP, which is a call to the memcmp built-in function.
4120 Return NULL_RTX if we failed and the
4121 caller should emit a normal call, otherwise try to get the result in
4122 TARGET, if convenient (and in mode MODE, if that's convenient). */
4124 static rtx
4125 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4127 if (!validate_arglist (exp,
4128 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4129 return NULL_RTX;
4130 else
4132 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4133 CALL_EXPR_ARG (exp, 1),
4134 CALL_EXPR_ARG (exp, 2));
4135 if (result)
4136 return expand_expr (result, target, mode, EXPAND_NORMAL);
4139 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4141 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4142 rtx result;
4143 rtx insn;
4144 tree arg1 = CALL_EXPR_ARG (exp, 0);
4145 tree arg2 = CALL_EXPR_ARG (exp, 1);
4146 tree len = CALL_EXPR_ARG (exp, 2);
4148 int arg1_align
4149 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4150 int arg2_align
4151 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4152 enum machine_mode insn_mode;
4154 #ifdef HAVE_cmpmemsi
4155 if (HAVE_cmpmemsi)
4156 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4157 else
4158 #endif
4159 #ifdef HAVE_cmpstrnsi
4160 if (HAVE_cmpstrnsi)
4161 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4162 else
4163 #endif
4164 return NULL_RTX;
4166 /* If we don't have POINTER_TYPE, call the function. */
4167 if (arg1_align == 0 || arg2_align == 0)
4168 return NULL_RTX;
4170 /* Make a place to write the result of the instruction. */
4171 result = target;
4172 if (! (result != 0
4173 && REG_P (result) && GET_MODE (result) == insn_mode
4174 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4175 result = gen_reg_rtx (insn_mode);
4177 arg1_rtx = get_memory_rtx (arg1, len);
4178 arg2_rtx = get_memory_rtx (arg2, len);
4179 arg3_rtx = expand_normal (fold_convert (sizetype, len));
4181 /* Set MEM_SIZE as appropriate. */
4182 if (GET_CODE (arg3_rtx) == CONST_INT)
4184 set_mem_size (arg1_rtx, arg3_rtx);
4185 set_mem_size (arg2_rtx, arg3_rtx);
4188 #ifdef HAVE_cmpmemsi
4189 if (HAVE_cmpmemsi)
4190 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4191 GEN_INT (MIN (arg1_align, arg2_align)));
4192 else
4193 #endif
4194 #ifdef HAVE_cmpstrnsi
4195 if (HAVE_cmpstrnsi)
4196 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4197 GEN_INT (MIN (arg1_align, arg2_align)));
4198 else
4199 #endif
4200 gcc_unreachable ();
4202 if (insn)
4203 emit_insn (insn);
4204 else
4205 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4206 TYPE_MODE (integer_type_node), 3,
4207 XEXP (arg1_rtx, 0), Pmode,
4208 XEXP (arg2_rtx, 0), Pmode,
4209 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4210 TYPE_UNSIGNED (sizetype)),
4211 TYPE_MODE (sizetype));
4213 /* Return the value in the proper mode for this function. */
4214 mode = TYPE_MODE (TREE_TYPE (exp));
4215 if (GET_MODE (result) == mode)
4216 return result;
4217 else if (target != 0)
4219 convert_move (target, result, 0);
4220 return target;
4222 else
4223 return convert_to_mode (mode, result, 0);
4225 #endif
4227 return NULL_RTX;
4230 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4231 if we failed; the caller should emit a normal call, otherwise try to get
4232 the result in TARGET, if convenient. */
4234 static rtx
4235 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4237 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4238 return NULL_RTX;
4239 else
4241 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4242 CALL_EXPR_ARG (exp, 1));
4243 if (result)
4244 return expand_expr (result, target, mode, EXPAND_NORMAL);
4247 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4248 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4249 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4251 rtx arg1_rtx, arg2_rtx;
4252 rtx result, insn = NULL_RTX;
4253 tree fndecl, fn;
4254 tree arg1 = CALL_EXPR_ARG (exp, 0);
4255 tree arg2 = CALL_EXPR_ARG (exp, 1);
4257 int arg1_align
4258 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4259 int arg2_align
4260 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4262 /* If we don't have POINTER_TYPE, call the function. */
4263 if (arg1_align == 0 || arg2_align == 0)
4264 return NULL_RTX;
4266 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4267 arg1 = builtin_save_expr (arg1);
4268 arg2 = builtin_save_expr (arg2);
4270 arg1_rtx = get_memory_rtx (arg1, NULL);
4271 arg2_rtx = get_memory_rtx (arg2, NULL);
4273 #ifdef HAVE_cmpstrsi
4274 /* Try to call cmpstrsi. */
4275 if (HAVE_cmpstrsi)
4277 enum machine_mode insn_mode
4278 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4280 /* Make a place to write the result of the instruction. */
4281 result = target;
4282 if (! (result != 0
4283 && REG_P (result) && GET_MODE (result) == insn_mode
4284 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4285 result = gen_reg_rtx (insn_mode);
4287 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4288 GEN_INT (MIN (arg1_align, arg2_align)));
4290 #endif
4291 #ifdef HAVE_cmpstrnsi
4292 /* Try to determine at least one length and call cmpstrnsi. */
4293 if (!insn && HAVE_cmpstrnsi)
4295 tree len;
4296 rtx arg3_rtx;
4298 enum machine_mode insn_mode
4299 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4300 tree len1 = c_strlen (arg1, 1);
4301 tree len2 = c_strlen (arg2, 1);
4303 if (len1)
4304 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4305 if (len2)
4306 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4308 /* If we don't have a constant length for the first, use the length
4309 of the second, if we know it. We don't require a constant for
4310 this case; some cost analysis could be done if both are available
4311 but neither is constant. For now, assume they're equally cheap,
4312 unless one has side effects. If both strings have constant lengths,
4313 use the smaller. */
4315 if (!len1)
4316 len = len2;
4317 else if (!len2)
4318 len = len1;
4319 else if (TREE_SIDE_EFFECTS (len1))
4320 len = len2;
4321 else if (TREE_SIDE_EFFECTS (len2))
4322 len = len1;
4323 else if (TREE_CODE (len1) != INTEGER_CST)
4324 len = len2;
4325 else if (TREE_CODE (len2) != INTEGER_CST)
4326 len = len1;
4327 else if (tree_int_cst_lt (len1, len2))
4328 len = len1;
4329 else
4330 len = len2;
4332 /* If both arguments have side effects, we cannot optimize. */
4333 if (!len || TREE_SIDE_EFFECTS (len))
4334 goto do_libcall;
4336 arg3_rtx = expand_normal (len);
4338 /* Make a place to write the result of the instruction. */
4339 result = target;
4340 if (! (result != 0
4341 && REG_P (result) && GET_MODE (result) == insn_mode
4342 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4343 result = gen_reg_rtx (insn_mode);
4345 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4346 GEN_INT (MIN (arg1_align, arg2_align)));
4348 #endif
4350 if (insn)
4352 emit_insn (insn);
4354 /* Return the value in the proper mode for this function. */
4355 mode = TYPE_MODE (TREE_TYPE (exp));
4356 if (GET_MODE (result) == mode)
4357 return result;
4358 if (target == 0)
4359 return convert_to_mode (mode, result, 0);
4360 convert_move (target, result, 0);
4361 return target;
4364 /* Expand the library call ourselves using a stabilized argument
4365 list to avoid re-evaluating the function's arguments twice. */
4366 #ifdef HAVE_cmpstrnsi
4367 do_libcall:
4368 #endif
4369 fndecl = get_callee_fndecl (exp);
4370 fn = build_call_expr (fndecl, 2, arg1, arg2);
4371 if (TREE_CODE (fn) == CALL_EXPR)
4372 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4373 return expand_call (fn, target, target == const0_rtx);
4375 #endif
4376 return NULL_RTX;
4379 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4380 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4381 the result in TARGET, if convenient. */
4383 static rtx
4384 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4386 if (!validate_arglist (exp,
4387 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4388 return NULL_RTX;
4389 else
4391 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4392 CALL_EXPR_ARG (exp, 1),
4393 CALL_EXPR_ARG (exp, 2));
4394 if (result)
4395 return expand_expr (result, target, mode, EXPAND_NORMAL);
4398 /* If c_strlen can determine an expression for one of the string
4399 lengths, and it doesn't have side effects, then emit cmpstrnsi
4400 using length MIN(strlen(string)+1, arg3). */
4401 #ifdef HAVE_cmpstrnsi
4402 if (HAVE_cmpstrnsi)
4404 tree len, len1, len2;
4405 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4406 rtx result, insn;
4407 tree fndecl, fn;
4408 tree arg1 = CALL_EXPR_ARG (exp, 0);
4409 tree arg2 = CALL_EXPR_ARG (exp, 1);
4410 tree arg3 = CALL_EXPR_ARG (exp, 2);
4412 int arg1_align
4413 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4414 int arg2_align
4415 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4416 enum machine_mode insn_mode
4417 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4419 len1 = c_strlen (arg1, 1);
4420 len2 = c_strlen (arg2, 1);
4422 if (len1)
4423 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4424 if (len2)
4425 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4427 /* If we don't have a constant length for the first, use the length
4428 of the second, if we know it. We don't require a constant for
4429 this case; some cost analysis could be done if both are available
4430 but neither is constant. For now, assume they're equally cheap,
4431 unless one has side effects. If both strings have constant lengths,
4432 use the smaller. */
4434 if (!len1)
4435 len = len2;
4436 else if (!len2)
4437 len = len1;
4438 else if (TREE_SIDE_EFFECTS (len1))
4439 len = len2;
4440 else if (TREE_SIDE_EFFECTS (len2))
4441 len = len1;
4442 else if (TREE_CODE (len1) != INTEGER_CST)
4443 len = len2;
4444 else if (TREE_CODE (len2) != INTEGER_CST)
4445 len = len1;
4446 else if (tree_int_cst_lt (len1, len2))
4447 len = len1;
4448 else
4449 len = len2;
4451 /* If both arguments have side effects, we cannot optimize. */
4452 if (!len || TREE_SIDE_EFFECTS (len))
4453 return NULL_RTX;
4455 /* The actual new length parameter is MIN(len,arg3). */
4456 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4457 fold_convert (TREE_TYPE (len), arg3));
4459 /* If we don't have POINTER_TYPE, call the function. */
4460 if (arg1_align == 0 || arg2_align == 0)
4461 return NULL_RTX;
4463 /* Make a place to write the result of the instruction. */
4464 result = target;
4465 if (! (result != 0
4466 && REG_P (result) && GET_MODE (result) == insn_mode
4467 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4468 result = gen_reg_rtx (insn_mode);
4470 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4471 arg1 = builtin_save_expr (arg1);
4472 arg2 = builtin_save_expr (arg2);
4473 len = builtin_save_expr (len);
4475 arg1_rtx = get_memory_rtx (arg1, len);
4476 arg2_rtx = get_memory_rtx (arg2, len);
4477 arg3_rtx = expand_normal (len);
4478 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4479 GEN_INT (MIN (arg1_align, arg2_align)));
4480 if (insn)
4482 emit_insn (insn);
4484 /* Return the value in the proper mode for this function. */
4485 mode = TYPE_MODE (TREE_TYPE (exp));
4486 if (GET_MODE (result) == mode)
4487 return result;
4488 if (target == 0)
4489 return convert_to_mode (mode, result, 0);
4490 convert_move (target, result, 0);
4491 return target;
4494 /* Expand the library call ourselves using a stabilized argument
4495 list to avoid re-evaluating the function's arguments twice. */
4496 fndecl = get_callee_fndecl (exp);
4497 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4498 if (TREE_CODE (fn) == CALL_EXPR)
4499 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4500 return expand_call (fn, target, target == const0_rtx);
4502 #endif
4503 return NULL_RTX;
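/* Editorial sketch, not part of builtins.c: when one strncmp argument is a
   string constant, the comparison length passed to cmpstrnsi above is clamped
   to MIN (strlen (s) + 1, n), since bytes past the NUL terminator cannot
   change the result.  The helper name is hypothetical. */
#include <string.h>

static size_t
example_strncmp_length (const char *constant_arg, size_t n)
{
  size_t len = strlen (constant_arg) + 1;
  return len < n ? len : n;   /* length actually used for the comparison */
}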
4506 /* Expand expression EXP, which is a call to the strcat builtin.
4507 Return NULL_RTX if we failed; the caller should emit a normal call,
4508 otherwise try to get the result in TARGET, if convenient. */
4510 static rtx
4511 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4513 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4514 return NULL_RTX;
4515 else
4517 tree dst = CALL_EXPR_ARG (exp, 0);
4518 tree src = CALL_EXPR_ARG (exp, 1);
4519 const char *p = c_getstr (src);
4521 /* If the string length is zero, return the dst parameter. */
4522 if (p && *p == '\0')
4523 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4525 if (optimize_insn_for_speed_p ())
4527 /* See if we can store by pieces into (dst + strlen(dst)). */
4528 tree newsrc, newdst,
4529 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4530 rtx insns;
4532 /* Stabilize the argument list. */
4533 newsrc = builtin_save_expr (src);
4534 dst = builtin_save_expr (dst);
4536 start_sequence ();
4538 /* Create strlen (dst). */
4539 newdst = build_call_expr (strlen_fn, 1, dst);
4540 /* Create (dst p+ strlen (dst)). */
4542 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4543 newdst = builtin_save_expr (newdst);
4545 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4547 end_sequence (); /* Stop sequence. */
4548 return NULL_RTX;
4551 /* Output the entire sequence. */
4552 insns = get_insns ();
4553 end_sequence ();
4554 emit_insn (insns);
4556 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4559 return NULL_RTX;
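/* Editorial sketch, not part of builtins.c: the speed optimization above
   rewrites strcat as a copy into DST + strlen (DST), so the strcpy /
   store_by_pieces path can be reused.  The helper name is hypothetical. */
#include <string.h>

static char *
example_strcat_rewrite (char *dst, const char *src)
{
  strcpy (dst + strlen (dst), src);   /* what strcat (dst, src) becomes */
  return dst;
}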
4563 /* Expand expression EXP, which is a call to the strncat builtin.
4564 Return NULL_RTX if we failed; the caller should emit a normal call,
4565 otherwise try to get the result in TARGET, if convenient. */
4567 static rtx
4568 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4570 if (validate_arglist (exp,
4571 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4573 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4574 CALL_EXPR_ARG (exp, 1),
4575 CALL_EXPR_ARG (exp, 2));
4576 if (result)
4577 return expand_expr (result, target, mode, EXPAND_NORMAL);
4579 return NULL_RTX;
4582 /* Expand expression EXP, which is a call to the strspn builtin.
4583 Return NULL_RTX if we failed; the caller should emit a normal call,
4584 otherwise try to get the result in TARGET, if convenient. */
4586 static rtx
4587 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4589 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4591 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4592 CALL_EXPR_ARG (exp, 1));
4593 if (result)
4594 return expand_expr (result, target, mode, EXPAND_NORMAL);
4596 return NULL_RTX;
4599 /* Expand expression EXP, which is a call to the strcspn builtin.
4600 Return NULL_RTX if we failed; the caller should emit a normal call,
4601 otherwise try to get the result in TARGET, if convenient. */
4603 static rtx
4604 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4606 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4608 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4609 CALL_EXPR_ARG (exp, 1));
4610 if (result)
4611 return expand_expr (result, target, mode, EXPAND_NORMAL);
4613 return NULL_RTX;
4616 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4617 if that's convenient. */
4619 rtx
4620 expand_builtin_saveregs (void)
4622 rtx val, seq;
4624 /* Don't do __builtin_saveregs more than once in a function.
4625 Save the result of the first call and reuse it. */
4626 if (saveregs_value != 0)
4627 return saveregs_value;
4629 /* When this function is called, it means that registers must be
4630 saved on entry to this function. So we migrate the call to the
4631 first insn of this function. */
4633 start_sequence ();
4635 /* Do whatever the machine needs done in this case. */
4636 val = targetm.calls.expand_builtin_saveregs ();
4638 seq = get_insns ();
4639 end_sequence ();
4641 saveregs_value = val;
4643 /* Put the insns after the NOTE that starts the function. If this
4644 is inside a start_sequence, make the outer-level insn chain current, so
4645 the code is placed at the start of the function. */
4646 push_topmost_sequence ();
4647 emit_insn_after (seq, entry_of_function ());
4648 pop_topmost_sequence ();
4650 return val;
4653 /* __builtin_args_info (N) returns word N of the arg space info
4654 for the current function. The number and meanings of words
4655 are controlled by the definition of CUMULATIVE_ARGS. */
4657 static rtx
4658 expand_builtin_args_info (tree exp)
4660 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4661 int *word_ptr = (int *) &crtl->args.info;
4663 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4665 if (call_expr_nargs (exp) != 0)
4667 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4668 error ("argument of %<__builtin_args_info%> must be constant");
4669 else
4671 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4673 if (wordnum < 0 || wordnum >= nwords)
4674 error ("argument of %<__builtin_args_info%> out of range");
4675 else
4676 return GEN_INT (word_ptr[wordnum]);
4679 else
4680 error ("missing argument in %<__builtin_args_info%>");
4682 return const0_rtx;
4685 /* Expand a call to __builtin_next_arg. */
4687 static rtx
4688 expand_builtin_next_arg (void)
4690 /* Checking arguments is already done in fold_builtin_next_arg
4691 that must be called before this function. */
4692 return expand_binop (ptr_mode, add_optab,
4693 crtl->args.internal_arg_pointer,
4694 crtl->args.arg_offset_rtx,
4695 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4698 /* Make it easier for the backends by protecting the valist argument
4699 from multiple evaluations. */
4701 static tree
4702 stabilize_va_list (tree valist, int needs_lvalue)
4704 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4706 gcc_assert (vatype != NULL_TREE);
4708 if (TREE_CODE (vatype) == ARRAY_TYPE)
4710 if (TREE_SIDE_EFFECTS (valist))
4711 valist = save_expr (valist);
4713 /* For this case, the backends will be expecting a pointer to
4714 vatype, but it's possible we've actually been given an array
4715 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4716 So fix it. */
4717 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4719 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4720 valist = build_fold_addr_expr_with_type (valist, p1);
4723 else
4725 tree pt;
4727 if (! needs_lvalue)
4729 if (! TREE_SIDE_EFFECTS (valist))
4730 return valist;
4732 pt = build_pointer_type (vatype);
4733 valist = fold_build1 (ADDR_EXPR, pt, valist);
4734 TREE_SIDE_EFFECTS (valist) = 1;
4737 if (TREE_SIDE_EFFECTS (valist))
4738 valist = save_expr (valist);
4739 valist = build_fold_indirect_ref (valist);
4742 return valist;
4745 /* The "standard" definition of va_list is void*. */
4747 tree
4748 std_build_builtin_va_list (void)
4750 return ptr_type_node;
4753 /* The "standard" abi va_list is va_list_type_node. */
4755 tree
4756 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4758 return va_list_type_node;
4761 /* The "standard" type of va_list is va_list_type_node. */
4763 tree
4764 std_canonical_va_list_type (tree type)
4766 tree wtype, htype;
4768 if (INDIRECT_REF_P (type))
4769 type = TREE_TYPE (type);
4770 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4771 type = TREE_TYPE (type);
4772 wtype = va_list_type_node;
4773 htype = type;
4774 /* Treat structure va_list types. */
4775 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4776 htype = TREE_TYPE (htype);
4777 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4779 /* If va_list is an array type, the argument may have decayed
4780 to a pointer type, e.g. by being passed to another function.
4781 In that case, unwrap both types so that we can compare the
4782 underlying records. */
4783 if (TREE_CODE (htype) == ARRAY_TYPE
4784 || POINTER_TYPE_P (htype))
4786 wtype = TREE_TYPE (wtype);
4787 htype = TREE_TYPE (htype);
4790 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4791 return va_list_type_node;
4793 return NULL_TREE;
4796 /* The "standard" implementation of va_start: just assign `nextarg' to
4797 the variable. */
4799 void
4800 std_expand_builtin_va_start (tree valist, rtx nextarg)
4802 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4803 convert_move (va_r, nextarg, 0);
4806 /* Expand EXP, a call to __builtin_va_start. */
4808 static rtx
4809 expand_builtin_va_start (tree exp)
4811 rtx nextarg;
4812 tree valist;
4814 if (call_expr_nargs (exp) < 2)
4816 error ("too few arguments to function %<va_start%>");
4817 return const0_rtx;
4820 if (fold_builtin_next_arg (exp, true))
4821 return const0_rtx;
4823 nextarg = expand_builtin_next_arg ();
4824 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4826 if (targetm.expand_builtin_va_start)
4827 targetm.expand_builtin_va_start (valist, nextarg);
4828 else
4829 std_expand_builtin_va_start (valist, nextarg);
4831 return const0_rtx;
4834 /* The "standard" implementation of va_arg: read the value from the
4835 current (padded) address and increment by the (padded) size. */
4837 tree
4838 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4839 gimple_seq *post_p)
4841 tree addr, t, type_size, rounded_size, valist_tmp;
4842 unsigned HOST_WIDE_INT align, boundary;
4843 bool indirect;
4845 #ifdef ARGS_GROW_DOWNWARD
4846 /* All of the alignment and movement below is for args-grow-up machines.
4847 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4848 implement their own specialized gimplify_va_arg_expr routines. */
4849 gcc_unreachable ();
4850 #endif
4852 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4853 if (indirect)
4854 type = build_pointer_type (type);
4856 align = PARM_BOUNDARY / BITS_PER_UNIT;
4857 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4859 /* When the caller aligns a parameter on the stack, if the parameter's
4860 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4861 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. Match the callee's
4862 expectation here with the caller's behavior. */
4863 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4864 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4866 boundary /= BITS_PER_UNIT;
4868 /* Hoist the valist value into a temporary for the moment. */
4869 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4871 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4872 requires greater alignment, we must perform dynamic alignment. */
4873 if (boundary > align
4874 && !integer_zerop (TYPE_SIZE (type)))
4876 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4877 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4878 valist_tmp, size_int (boundary - 1)));
4879 gimplify_and_add (t, pre_p);
4881 t = fold_convert (sizetype, valist_tmp);
4882 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4883 fold_convert (TREE_TYPE (valist),
4884 fold_build2 (BIT_AND_EXPR, sizetype, t,
4885 size_int (-boundary))));
4886 gimplify_and_add (t, pre_p);
4888 else
4889 boundary = align;
4891 /* If the actual alignment is less than the alignment of the type,
4892 adjust the type accordingly so that we don't assume strict alignment
4893 when dereferencing the pointer. */
4894 boundary *= BITS_PER_UNIT;
4895 if (boundary < TYPE_ALIGN (type))
4897 type = build_variant_type_copy (type);
4898 TYPE_ALIGN (type) = boundary;
4901 /* Compute the rounded size of the type. */
4902 type_size = size_in_bytes (type);
4903 rounded_size = round_up (type_size, align);
4905 /* Reduce rounded_size so it's sharable with the postqueue. */
4906 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4908 /* Get AP. */
4909 addr = valist_tmp;
4910 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4912 /* Small args are padded downward. */
4913 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4914 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4915 size_binop (MINUS_EXPR, rounded_size, type_size));
4916 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4919 /* Compute new value for AP. */
4920 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4921 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4922 gimplify_and_add (t, pre_p);
4924 addr = fold_convert (build_pointer_type (type), addr);
4926 if (indirect)
4927 addr = build_va_arg_indirect_ref (addr);
4929 return build_va_arg_indirect_ref (addr);
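/* Editorial sketch, not part of builtins.c: the dynamic alignment emitted by
   std_gimplify_va_arg_expr is the usual add-and-mask rounding shown below,
   assuming BOUNDARY is a power of two (as the expander does).  The helper
   name is hypothetical. */
#include <stdint.h>

static uintptr_t
example_round_up (uintptr_t ap, uintptr_t boundary)
{
  return (ap + boundary - 1) & -boundary;   /* round AP up to BOUNDARY */
}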
4932 /* Build an indirect-ref expression over the given TREE, which represents a
4933 piece of a va_arg() expansion. */
4934 tree
4935 build_va_arg_indirect_ref (tree addr)
4937 addr = build_fold_indirect_ref (addr);
4939 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4940 mf_mark (addr);
4942 return addr;
4945 /* Return a dummy expression of type TYPE in order to keep going after an
4946 error. */
4948 static tree
4949 dummy_object (tree type)
4951 tree t = build_int_cst (build_pointer_type (type), 0);
4952 return build1 (INDIRECT_REF, type, t);
4955 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4956 builtin function, but a very special sort of operator. */
4958 enum gimplify_status
4959 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4961 tree promoted_type, have_va_type;
4962 tree valist = TREE_OPERAND (*expr_p, 0);
4963 tree type = TREE_TYPE (*expr_p);
4964 tree t;
4966 /* Verify that valist is of the proper type. */
4967 have_va_type = TREE_TYPE (valist);
4968 if (have_va_type == error_mark_node)
4969 return GS_ERROR;
4970 have_va_type = targetm.canonical_va_list_type (have_va_type);
4972 if (have_va_type == NULL_TREE)
4974 error ("first argument to %<va_arg%> not of type %<va_list%>");
4975 return GS_ERROR;
4978 /* Generate a diagnostic for requesting data of a type that cannot
4979 be passed through `...' due to type promotion at the call site. */
4980 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4981 != type)
4983 static bool gave_help;
4984 bool warned;
4986 /* Unfortunately, this is merely undefined, rather than a constraint
4987 violation, so we cannot make this an error. If this call is never
4988 executed, the program is still strictly conforming. */
4989 warned = warning (0, "%qT is promoted to %qT when passed through %<...%>",
4990 type, promoted_type);
4991 if (!gave_help && warned)
4993 gave_help = true;
4994 inform (input_location, "(so you should pass %qT not %qT to %<va_arg%>)",
4995 promoted_type, type);
4998 /* We can, however, treat "undefined" any way we please.
4999 Call abort to encourage the user to fix the program. */
5000 if (warned)
5001 inform (input_location, "if this code is reached, the program will abort");
5002 /* Before the abort, allow the evaluation of the va_list
5003 expression to exit or longjmp. */
5004 gimplify_and_add (valist, pre_p);
5005 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
5006 gimplify_and_add (t, pre_p);
5008 /* This is dead code, but go ahead and finish so that the
5009 mode of the result comes out right. */
5010 *expr_p = dummy_object (type);
5011 return GS_ALL_DONE;
5013 else
5015 /* Make it easier for the backends by protecting the valist argument
5016 from multiple evaluations. */
5017 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
5019 /* For this case, the backends will be expecting a pointer to
5020 TREE_TYPE (abi), but it's possible we've
5021 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5022 So fix it. */
5023 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5025 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
5026 valist = build_fold_addr_expr_with_type (valist, p1);
5029 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
5031 else
5032 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
5034 if (!targetm.gimplify_va_arg_expr)
5035 /* FIXME: Once most targets are converted we should merely
5036 assert this is non-null. */
5037 return GS_ALL_DONE;
5039 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5040 return GS_OK;
5044 /* Expand EXP, a call to __builtin_va_end. */
5046 static rtx
5047 expand_builtin_va_end (tree exp)
5049 tree valist = CALL_EXPR_ARG (exp, 0);
5051 /* Evaluate for side effects, if needed. I hate macros that don't
5052 do that. */
5053 if (TREE_SIDE_EFFECTS (valist))
5054 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5056 return const0_rtx;
5059 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5060 builtin rather than just as an assignment in stdarg.h because of the
5061 nastiness of array-type va_list types. */
5063 static rtx
5064 expand_builtin_va_copy (tree exp)
5066 tree dst, src, t;
5068 dst = CALL_EXPR_ARG (exp, 0);
5069 src = CALL_EXPR_ARG (exp, 1);
5071 dst = stabilize_va_list (dst, 1);
5072 src = stabilize_va_list (src, 0);
5074 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5076 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5078 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5079 TREE_SIDE_EFFECTS (t) = 1;
5080 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5082 else
5084 rtx dstb, srcb, size;
5086 /* Evaluate to pointers. */
5087 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5088 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5089 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5090 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5092 dstb = convert_memory_address (Pmode, dstb);
5093 srcb = convert_memory_address (Pmode, srcb);
5095 /* "Dereference" to BLKmode memories. */
5096 dstb = gen_rtx_MEM (BLKmode, dstb);
5097 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5098 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5099 srcb = gen_rtx_MEM (BLKmode, srcb);
5100 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5101 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5103 /* Copy. */
5104 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5107 return const0_rtx;
5110 /* Expand a call to one of the builtin functions __builtin_frame_address or
5111 __builtin_return_address. */
5113 static rtx
5114 expand_builtin_frame_address (tree fndecl, tree exp)
5116 /* The argument must be a nonnegative integer constant.
5117 It counts the number of frames to scan up the stack.
5118 The value is the return address saved in that frame. */
5119 if (call_expr_nargs (exp) == 0)
5120 /* Warning about missing arg was already issued. */
5121 return const0_rtx;
5122 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5124 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5125 error ("invalid argument to %<__builtin_frame_address%>");
5126 else
5127 error ("invalid argument to %<__builtin_return_address%>");
5128 return const0_rtx;
5130 else
5132 rtx tem
5133 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5134 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5136 /* Some ports cannot access arbitrary stack frames. */
5137 if (tem == NULL)
5139 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5140 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5141 else
5142 warning (0, "unsupported argument to %<__builtin_return_address%>");
5143 return const0_rtx;
5146 /* For __builtin_frame_address, return what we've got. */
5147 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5148 return tem;
5150 if (!REG_P (tem)
5151 && ! CONSTANT_P (tem))
5152 tem = copy_to_mode_reg (Pmode, tem);
5153 return tem;
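/* Editorial sketch, not part of builtins.c: the argument to these builtins
   must be a nonnegative integer constant giving the number of frames to walk
   up; level 0 means the current frame.  The helper name is hypothetical. */
static void *
example_return_address (void)
{
  return __builtin_return_address (0);   /* return address of this frame */
}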
5157 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5158 we failed and the caller should emit a normal call, otherwise try to get
5159 the result in TARGET, if convenient. */
5161 static rtx
5162 expand_builtin_alloca (tree exp, rtx target)
5164 rtx op0;
5165 rtx result;
5167 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5168 should always expand to function calls. These can be intercepted
5169 in libmudflap. */
5170 if (flag_mudflap)
5171 return NULL_RTX;
5173 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5174 return NULL_RTX;
5176 /* Compute the argument. */
5177 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5179 /* Allocate the desired space. */
5180 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5181 result = convert_memory_address (ptr_mode, result);
5183 return result;
5186 /* Expand a call to a bswap builtin with argument ARG0. MODE
5187 is the mode to expand with. */
5189 static rtx
5190 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5192 enum machine_mode mode;
5193 tree arg;
5194 rtx op0;
5196 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5197 return NULL_RTX;
5199 arg = CALL_EXPR_ARG (exp, 0);
5200 mode = TYPE_MODE (TREE_TYPE (arg));
5201 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5203 target = expand_unop (mode, bswap_optab, op0, target, 1);
5205 gcc_assert (target);
5207 return convert_to_mode (mode, target, 0);
5210 /* Expand a call to a unary builtin in EXP.
5211 Return NULL_RTX if a normal call should be emitted rather than expanding the
5212 function in-line. If convenient, the result should be placed in TARGET.
5213 SUBTARGET may be used as the target for computing one of EXP's operands. */
5215 static rtx
5216 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5217 rtx subtarget, optab op_optab)
5219 rtx op0;
5221 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5222 return NULL_RTX;
5224 /* Compute the argument. */
5225 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5226 VOIDmode, EXPAND_NORMAL);
5227 /* Compute op, into TARGET if possible.
5228 Set TARGET to wherever the result comes back. */
5229 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5230 op_optab, op0, target, 1);
5231 gcc_assert (target);
5233 return convert_to_mode (target_mode, target, 0);
5236 /* If the string passed to fputs is a constant and is one character
5237 long, we attempt to transform this call into __builtin_fputc(). */
5239 static rtx
5240 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5242 /* Verify the arguments in the original call. */
5243 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5245 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5246 CALL_EXPR_ARG (exp, 1),
5247 (target == const0_rtx),
5248 unlocked, NULL_TREE);
5249 if (result)
5250 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5252 return NULL_RTX;
5255 /* Expand a call to __builtin_expect. We just return our argument
5256 as the builtin_expect semantics should already have been handled by
5257 the tree branch prediction pass. */
5259 static rtx
5260 expand_builtin_expect (tree exp, rtx target)
5262 tree arg, c;
5264 if (call_expr_nargs (exp) < 2)
5265 return const0_rtx;
5266 arg = CALL_EXPR_ARG (exp, 0);
5267 c = CALL_EXPR_ARG (exp, 1);
5269 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5270 /* When guessing was done, the hints should already have been stripped away. */
5271 gcc_assert (!flag_guess_branch_prob
5272 || optimize == 0 || errorcount || sorrycount);
5273 return target;
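/* Illustrative example, not part of the original sources: by the time

     if (__builtin_expect (x == 0, 0)) ...

   reaches this point, the branch probability hint has already been
   consumed by the tree branch prediction pass, so the expansion simply
   evaluates and returns the first argument, x == 0.  */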
5276 void
5277 expand_builtin_trap (void)
5279 #ifdef HAVE_trap
5280 if (HAVE_trap)
5281 emit_insn (gen_trap ());
5282 else
5283 #endif
5284 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5285 emit_barrier ();
5288 /* Expand EXP, a call to fabs, fabsf or fabsl.
5289 Return NULL_RTX if a normal call should be emitted rather than expanding
5290 the function inline. If convenient, the result should be placed
5291 in TARGET. SUBTARGET may be used as the target for computing
5292 the operand. */
5294 static rtx
5295 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5297 enum machine_mode mode;
5298 tree arg;
5299 rtx op0;
5301 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5302 return NULL_RTX;
5304 arg = CALL_EXPR_ARG (exp, 0);
5305 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5306 mode = TYPE_MODE (TREE_TYPE (arg));
5307 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5308 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5311 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5312 Return NULL_RTX if a normal call should be emitted rather than expanding the
5313 function inline. If convenient, the result should be placed in TARGET.
5314 SUBTARGET may be used as the target for computing the operand. */
5316 static rtx
5317 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5319 rtx op0, op1;
5320 tree arg;
5322 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5323 return NULL_RTX;
5325 arg = CALL_EXPR_ARG (exp, 0);
5326 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5328 arg = CALL_EXPR_ARG (exp, 1);
5329 op1 = expand_normal (arg);
5331 return expand_copysign (op0, op1, target);
5334 /* Create a new constant string literal and return a char* pointer to it.
5335 The STRING_CST value is the LEN characters at STR. */
5336 tree
5337 build_string_literal (int len, const char *str)
5339 tree t, elem, index, type;
5341 t = build_string (len, str);
5342 elem = build_type_variant (char_type_node, 1, 0);
5343 index = build_index_type (size_int (len - 1));
5344 type = build_array_type (elem, index);
5345 TREE_TYPE (t) = type;
5346 TREE_CONSTANT (t) = 1;
5347 TREE_READONLY (t) = 1;
5348 TREE_STATIC (t) = 1;
5350 type = build_pointer_type (elem);
5351 t = build1 (ADDR_EXPR, type,
5352 build4 (ARRAY_REF, elem,
5353 t, integer_zero_node, NULL_TREE, NULL_TREE));
5354 return t;
5357 /* Expand EXP, a call to printf or printf_unlocked.
5358 Return NULL_RTX if a normal call should be emitted rather than transforming
5359 the function inline. If convenient, the result should be placed in
5360 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5361 call. */
5362 static rtx
5363 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5364 bool unlocked)
5366 /* If we're using an unlocked function, assume the other unlocked
5367 functions exist explicitly. */
5368 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5369 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5370 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5371 : implicit_built_in_decls[BUILT_IN_PUTS];
5372 const char *fmt_str;
5373 tree fn = 0;
5374 tree fmt, arg;
5375 int nargs = call_expr_nargs (exp);
5377 /* If the return value is used, don't do the transformation. */
5378 if (target != const0_rtx)
5379 return NULL_RTX;
5381 /* Verify the required arguments in the original call. */
5382 if (nargs == 0)
5383 return NULL_RTX;
5384 fmt = CALL_EXPR_ARG (exp, 0);
5385 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5386 return NULL_RTX;
5388 /* Check whether the format is a literal string constant. */
5389 fmt_str = c_getstr (fmt);
5390 if (fmt_str == NULL)
5391 return NULL_RTX;
5393 if (!init_target_chars ())
5394 return NULL_RTX;
5396 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5397 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5399 if ((nargs != 2)
5400 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5401 return NULL_RTX;
5402 if (fn_puts)
5403 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5405 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5406 else if (strcmp (fmt_str, target_percent_c) == 0)
5408 if ((nargs != 2)
5409 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5410 return NULL_RTX;
5411 if (fn_putchar)
5412 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5414 else
5416 /* We can't handle anything else with % args or %% ... yet. */
5417 if (strchr (fmt_str, target_percent))
5418 return NULL_RTX;
5420 if (nargs > 1)
5421 return NULL_RTX;
5423 /* If the format specifier was "", printf does nothing. */
5424 if (fmt_str[0] == '\0')
5425 return const0_rtx;
5426 /* If the format specifier has length of 1, call putchar. */
5427 if (fmt_str[1] == '\0')
5429 /* Given printf("c"), (where c is any one character,)
5430 convert "c"[0] to an int and pass that to the replacement
5431 function. */
5432 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5433 if (fn_putchar)
5434 fn = build_call_expr (fn_putchar, 1, arg);
5436 else
5438 /* If the format specifier was "string\n", call puts("string"). */
5439 size_t len = strlen (fmt_str);
5440 if ((unsigned char)fmt_str[len - 1] == target_newline)
5442 /* Create a NUL-terminated string that's one char shorter
5443 than the original, stripping off the trailing '\n'. */
5444 char *newstr = XALLOCAVEC (char, len);
5445 memcpy (newstr, fmt_str, len - 1);
5446 newstr[len - 1] = 0;
5447 arg = build_string_literal (len, newstr);
5448 if (fn_puts)
5449 fn = build_call_expr (fn_puts, 1, arg);
5451 else
5452 /* We'd like to arrange to call fputs(string,stdout) here,
5453 but we need stdout and don't have a way to get it yet. */
5454 return NULL_RTX;
5458 if (!fn)
5459 return NULL_RTX;
5460 if (TREE_CODE (fn) == CALL_EXPR)
5461 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5462 return expand_expr (fn, target, mode, EXPAND_NORMAL);
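/* Illustrative examples, not part of the original sources: with the result
   unused, the transformations above rewrite

     printf ("%s\n", s)   ->  puts (s)
     printf ("%c", c)     ->  putchar (c)
     printf ("hi\n")      ->  puts ("hi")
     printf ("x")         ->  putchar ('x')
     printf ("")          ->  nothing at all

   while any other format containing '%' is left as a real printf call.  */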
5465 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5466 Return NULL_RTX if a normal call should be emitted rather than transforming
5467 the function inline. If convenient, the result should be placed in
5468 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5469 call. */
5470 static rtx
5471 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5472 bool unlocked)
5474 /* If we're using an unlocked function, assume the other unlocked
5475 functions exist explicitly. */
5476 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5477 : implicit_built_in_decls[BUILT_IN_FPUTC];
5478 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5479 : implicit_built_in_decls[BUILT_IN_FPUTS];
5480 const char *fmt_str;
5481 tree fn = 0;
5482 tree fmt, fp, arg;
5483 int nargs = call_expr_nargs (exp);
5485 /* If the return value is used, don't do the transformation. */
5486 if (target != const0_rtx)
5487 return NULL_RTX;
5489 /* Verify the required arguments in the original call. */
5490 if (nargs < 2)
5491 return NULL_RTX;
5492 fp = CALL_EXPR_ARG (exp, 0);
5493 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5494 return NULL_RTX;
5495 fmt = CALL_EXPR_ARG (exp, 1);
5496 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5497 return NULL_RTX;
5499 /* Check whether the format is a literal string constant. */
5500 fmt_str = c_getstr (fmt);
5501 if (fmt_str == NULL)
5502 return NULL_RTX;
5504 if (!init_target_chars ())
5505 return NULL_RTX;
5507 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5508 if (strcmp (fmt_str, target_percent_s) == 0)
5510 if ((nargs != 3)
5511 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5512 return NULL_RTX;
5513 arg = CALL_EXPR_ARG (exp, 2);
5514 if (fn_fputs)
5515 fn = build_call_expr (fn_fputs, 2, arg, fp);
5517 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5518 else if (strcmp (fmt_str, target_percent_c) == 0)
5520 if ((nargs != 3)
5521 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5522 return NULL_RTX;
5523 arg = CALL_EXPR_ARG (exp, 2);
5524 if (fn_fputc)
5525 fn = build_call_expr (fn_fputc, 2, arg, fp);
5527 else
5529 /* We can't handle anything else with % args or %% ... yet. */
5530 if (strchr (fmt_str, target_percent))
5531 return NULL_RTX;
5533 if (nargs > 2)
5534 return NULL_RTX;
5536 /* If the format specifier was "", fprintf does nothing. */
5537 if (fmt_str[0] == '\0')
5539 /* Evaluate and ignore FILE* argument for side-effects. */
5540 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5541 return const0_rtx;
5544 /* When "string" doesn't contain %, replace all cases of
5545 fprintf(stream,string) with fputs(string,stream). The fputs
5546 builtin will take care of special cases like length == 1. */
5547 if (fn_fputs)
5548 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5551 if (!fn)
5552 return NULL_RTX;
5553 if (TREE_CODE (fn) == CALL_EXPR)
5554 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5555 return expand_expr (fn, target, mode, EXPAND_NORMAL);
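/* Illustrative examples, not part of the original sources: with the result
   unused, the transformations above rewrite

     fprintf (fp, "%s", s)   ->  fputs (s, fp)
     fprintf (fp, "%c", c)   ->  fputc (c, fp)
     fprintf (fp, "text")    ->  fputs ("text", fp)
     fprintf (fp, "")        ->  evaluate fp for side effects only

   while any other format containing '%' is left as a real fprintf call.  */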
5558 /* Expand a call EXP to sprintf. Return NULL_RTX if
5559 a normal call should be emitted rather than expanding the function
5560 inline. If convenient, the result should be placed in TARGET with
5561 mode MODE. */
5563 static rtx
5564 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5566 tree dest, fmt;
5567 const char *fmt_str;
5568 int nargs = call_expr_nargs (exp);
5570 /* Verify the required arguments in the original call. */
5571 if (nargs < 2)
5572 return NULL_RTX;
5573 dest = CALL_EXPR_ARG (exp, 0);
5574 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5575 return NULL_RTX;
5576 fmt = CALL_EXPR_ARG (exp, 1);
5577 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5578 return NULL_RTX;
5580 /* Check whether the format is a literal string constant. */
5581 fmt_str = c_getstr (fmt);
5582 if (fmt_str == NULL)
5583 return NULL_RTX;
5585 if (!init_target_chars ())
5586 return NULL_RTX;
5588 /* If the format doesn't contain % args or %%, use strcpy. */
5589 if (strchr (fmt_str, target_percent) == 0)
5591 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5592 tree exp;
5594 if ((nargs > 2) || ! fn)
5595 return NULL_RTX;
5596 expand_expr (build_call_expr (fn, 2, dest, fmt),
5597 const0_rtx, VOIDmode, EXPAND_NORMAL);
5598 if (target == const0_rtx)
5599 return const0_rtx;
5600 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5601 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5603 /* If the format is "%s", use strcpy if the result isn't used. */
5604 else if (strcmp (fmt_str, target_percent_s) == 0)
5606 tree fn, arg, len;
5607 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5609 if (! fn)
5610 return NULL_RTX;
5611 if (nargs != 3)
5612 return NULL_RTX;
5613 arg = CALL_EXPR_ARG (exp, 2);
5614 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5615 return NULL_RTX;
5617 if (target != const0_rtx)
5619 len = c_strlen (arg, 1);
5620 if (! len || TREE_CODE (len) != INTEGER_CST)
5621 return NULL_RTX;
5623 else
5624 len = NULL_TREE;
5626 expand_expr (build_call_expr (fn, 2, dest, arg),
5627 const0_rtx, VOIDmode, EXPAND_NORMAL);
5629 if (target == const0_rtx)
5630 return const0_rtx;
5631 return expand_expr (len, target, mode, EXPAND_NORMAL);
5634 return NULL_RTX;
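/* Illustrative examples, not part of the original sources: the code above
   rewrites

     sprintf (d, "hello")   ->  strcpy (d, "hello"), with value 5 if used
     sprintf (d, "%s", s)   ->  strcpy (d, s), with value strlen (s) if
                                that length is a compile-time constant

   and falls back to a normal call for any other format string.  */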
5637 /* Expand a call to either the entry or exit function profiler. */
5639 static rtx
5640 expand_builtin_profile_func (bool exitp)
5642 rtx this_rtx, which;
5644 this_rtx = DECL_RTL (current_function_decl);
5645 gcc_assert (MEM_P (this_rtx));
5646 this_rtx = XEXP (this_rtx, 0);
5648 if (exitp)
5649 which = profile_function_exit_libfunc;
5650 else
5651 which = profile_function_entry_libfunc;
5653 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5654 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5656 Pmode);
5658 return const0_rtx;
5661 /* Expand a call to __builtin___clear_cache. */
5663 static rtx
5664 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5666 #ifndef HAVE_clear_cache
5667 #ifdef CLEAR_INSN_CACHE
5668 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5669 does something. Just do the default expansion to a call to
5670 __clear_cache(). */
5671 return NULL_RTX;
5672 #else
5673 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5674 does nothing. There is no need to call it. Do nothing. */
5675 return const0_rtx;
5676 #endif /* CLEAR_INSN_CACHE */
5677 #else
5678 /* We have a "clear_cache" insn, and it will handle everything. */
5679 tree begin, end;
5680 rtx begin_rtx, end_rtx;
5681 enum insn_code icode;
5683 /* We must not expand to a library call. If we did, any
5684 fallback library function in libgcc that might contain a call to
5685 __builtin___clear_cache() would recurse infinitely. */
5686 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5688 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5689 return const0_rtx;
5692 if (HAVE_clear_cache)
5694 icode = CODE_FOR_clear_cache;
5696 begin = CALL_EXPR_ARG (exp, 0);
5697 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5698 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5699 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5700 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5702 end = CALL_EXPR_ARG (exp, 1);
5703 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5704 end_rtx = convert_memory_address (Pmode, end_rtx);
5705 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5706 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5708 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5710 return const0_rtx;
5711 #endif /* HAVE_clear_cache */
5714 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5716 static rtx
5717 round_trampoline_addr (rtx tramp)
5719 rtx temp, addend, mask;
5721 /* If we don't need too much alignment, we'll have been guaranteed
5722 proper alignment by get_trampoline_type. */
5723 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5724 return tramp;
5726 /* Round address up to desired boundary. */
5727 temp = gen_reg_rtx (Pmode);
5728 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5729 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5731 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5732 temp, 0, OPTAB_LIB_WIDEN);
5733 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5734 temp, 0, OPTAB_LIB_WIDEN);
5736 return tramp;
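/* Illustrative example, not part of the original sources: with a
   TRAMPOLINE_ALIGNMENT of 128 bits (16 bytes), ADDEND is 15 and MASK is
   -16, so the code above computes tramp = (tramp + 15) & -16.  */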
5739 static rtx
5740 expand_builtin_init_trampoline (tree exp)
5742 tree t_tramp, t_func, t_chain;
5743 rtx r_tramp, r_func, r_chain;
5744 #ifdef TRAMPOLINE_TEMPLATE
5745 rtx blktramp;
5746 #endif
5748 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5749 POINTER_TYPE, VOID_TYPE))
5750 return NULL_RTX;
5752 t_tramp = CALL_EXPR_ARG (exp, 0);
5753 t_func = CALL_EXPR_ARG (exp, 1);
5754 t_chain = CALL_EXPR_ARG (exp, 2);
5756 r_tramp = expand_normal (t_tramp);
5757 r_func = expand_normal (t_func);
5758 r_chain = expand_normal (t_chain);
5760 /* Generate insns to initialize the trampoline. */
5761 r_tramp = round_trampoline_addr (r_tramp);
5762 #ifdef TRAMPOLINE_TEMPLATE
5763 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5764 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5765 emit_block_move (blktramp, assemble_trampoline_template (),
5766 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5767 #endif
5768 trampolines_created = 1;
5769 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5771 return const0_rtx;
5774 static rtx
5775 expand_builtin_adjust_trampoline (tree exp)
5777 rtx tramp;
5779 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5780 return NULL_RTX;
5782 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5783 tramp = round_trampoline_addr (tramp);
5784 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5785 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5786 #endif
5788 return tramp;
5791 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5792 function. The function first checks whether the back end provides
5793 an insn to implement signbit for the respective mode. If not, it
5794 checks whether the floating point format of the value is such that
5795 the sign bit can be extracted. If that is not the case, the
5796 function returns NULL_RTX to indicate that a normal call should be
5797 emitted rather than expanding the function in-line. EXP is the
5798 expression that is a call to the builtin function; if convenient,
5799 the result should be placed in TARGET. */
5800 static rtx
5801 expand_builtin_signbit (tree exp, rtx target)
5803 const struct real_format *fmt;
5804 enum machine_mode fmode, imode, rmode;
5805 HOST_WIDE_INT hi, lo;
5806 tree arg;
5807 int word, bitpos;
5808 enum insn_code icode;
5809 rtx temp;
5811 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5812 return NULL_RTX;
5814 arg = CALL_EXPR_ARG (exp, 0);
5815 fmode = TYPE_MODE (TREE_TYPE (arg));
5816 rmode = TYPE_MODE (TREE_TYPE (exp));
5817 fmt = REAL_MODE_FORMAT (fmode);
5819 arg = builtin_save_expr (arg);
5821 /* Expand the argument yielding a RTX expression. */
5822 temp = expand_normal (arg);
5824 /* Check if the back end provides an insn that handles signbit for the
5825 argument's mode. */
5826 icode = signbit_optab->handlers [(int) fmode].insn_code;
5827 if (icode != CODE_FOR_nothing)
5829 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5830 emit_unop_insn (icode, target, temp, UNKNOWN);
5831 return target;
5834 /* For floating point formats without a sign bit, implement signbit
5835 as "ARG < 0.0". */
5836 bitpos = fmt->signbit_ro;
5837 if (bitpos < 0)
5839 /* But we can't do this if the format supports signed zero. */
5840 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5841 return NULL_RTX;
5843 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5844 build_real (TREE_TYPE (arg), dconst0));
5845 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5848 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5850 imode = int_mode_for_mode (fmode);
5851 if (imode == BLKmode)
5852 return NULL_RTX;
5853 temp = gen_lowpart (imode, temp);
5855 else
5857 imode = word_mode;
5858 /* Handle targets with different FP word orders. */
5859 if (FLOAT_WORDS_BIG_ENDIAN)
5860 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5861 else
5862 word = bitpos / BITS_PER_WORD;
5863 temp = operand_subword_force (temp, word, fmode);
5864 bitpos = bitpos % BITS_PER_WORD;
5867 /* Force the intermediate word_mode (or narrower) result into a
5868 register. This avoids attempting to create paradoxical SUBREGs
5869 of floating point modes below. */
5870 temp = force_reg (imode, temp);
5872 /* If the bitpos is within the "result mode" lowpart, the operation
5873 can be implemented with a single bitwise AND. Otherwise, we need
5874 a right shift and an AND. */
5876 if (bitpos < GET_MODE_BITSIZE (rmode))
5878 if (bitpos < HOST_BITS_PER_WIDE_INT)
5880 hi = 0;
5881 lo = (HOST_WIDE_INT) 1 << bitpos;
5883 else
5885 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5886 lo = 0;
5889 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5890 temp = gen_lowpart (rmode, temp);
5891 temp = expand_binop (rmode, and_optab, temp,
5892 immed_double_const (lo, hi, rmode),
5893 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5895 else
5897 /* Perform a logical right shift to place the signbit in the least
5898 significant bit, then truncate the result to the desired mode
5899 and mask just this bit. */
5900 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5901 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5902 temp = gen_lowpart (rmode, temp);
5903 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5904 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5907 return temp;
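/* Illustrative example, not part of the original sources and assuming an
   IEEE target: for signbitf, fmt->signbit_ro is 31, so the result is
   computed as (float_bits & 0x80000000); for signbit on a 64-bit-word
   target with a 32-bit int result, bit 63 lies outside the result mode,
   so the code instead emits (double_bits >> 63) & 1.  */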
5910 /* Expand fork or exec calls. TARGET is the desired target of the
5911 call. EXP is the call. FN is the
5912 declaration of the actual function. IGNORE is nonzero if the
5913 value is to be ignored. */
5915 static rtx
5916 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5918 tree id, decl;
5919 tree call;
5921 /* If we are not profiling, just call the function. */
5922 if (!profile_arc_flag)
5923 return NULL_RTX;
5925 /* Otherwise call the wrapper. This should be equivalent for the rest of the
5926 compiler, so the code does not diverge, and the wrapper may run the
5927 code necessary to keep the profiling sane. */
5929 switch (DECL_FUNCTION_CODE (fn))
5931 case BUILT_IN_FORK:
5932 id = get_identifier ("__gcov_fork");
5933 break;
5935 case BUILT_IN_EXECL:
5936 id = get_identifier ("__gcov_execl");
5937 break;
5939 case BUILT_IN_EXECV:
5940 id = get_identifier ("__gcov_execv");
5941 break;
5943 case BUILT_IN_EXECLP:
5944 id = get_identifier ("__gcov_execlp");
5945 break;
5947 case BUILT_IN_EXECLE:
5948 id = get_identifier ("__gcov_execle");
5949 break;
5951 case BUILT_IN_EXECVP:
5952 id = get_identifier ("__gcov_execvp");
5953 break;
5955 case BUILT_IN_EXECVE:
5956 id = get_identifier ("__gcov_execve");
5957 break;
5959 default:
5960 gcc_unreachable ();
5963 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5964 DECL_EXTERNAL (decl) = 1;
5965 TREE_PUBLIC (decl) = 1;
5966 DECL_ARTIFICIAL (decl) = 1;
5967 TREE_NOTHROW (decl) = 1;
5968 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5969 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5970 call = rewrite_call_expr (exp, 0, decl, 0);
5971 return expand_call (call, target, ignore);
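/* Illustrative example, not part of the original sources: when compiling
   with -fprofile-arcs, a call to fork () is rewritten here into a call to
   __gcov_fork (), and execl/execv/... into the corresponding __gcov_*
   wrappers, so the wrappers can keep the profile data sane across the
   fork or exec.  */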
5976 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5977 the pointer in these functions is void*, the tree optimizers may remove
5978 casts. The mode computed in expand_builtin isn't reliable either, due
5979 to __sync_bool_compare_and_swap.
5981 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5982 group of builtins. This gives us log2 of the mode size. */
5984 static inline enum machine_mode
5985 get_builtin_sync_mode (int fcode_diff)
5987 /* The size is not negotiable, so ask not to get BLKmode in return
5988 if the target indicates that a smaller size would be better. */
5989 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
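/* Illustrative example, not part of the original sources: for
   __sync_fetch_and_add_4 the difference from the _1 variant is 2, so the
   code above asks for an integer mode of 8 << 2 = 32 bits (SImode on
   typical targets where BITS_PER_UNIT is 8).  */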
5992 /* Expand the memory expression LOC and return the appropriate memory operand
5993 for the builtin_sync operations. */
5995 static rtx
5996 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5998 rtx addr, mem;
6000 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
6002 /* Note that we explicitly do not want any alias information for this
6003 memory, so that we kill all other live memories. Otherwise we don't
6004 satisfy the full barrier semantics of the intrinsic. */
6005 mem = validize_mem (gen_rtx_MEM (mode, addr));
6007 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6008 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6009 MEM_VOLATILE_P (mem) = 1;
6011 return mem;
6014 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6015 EXP is the CALL_EXPR. CODE is the rtx code
6016 that corresponds to the arithmetic or logical operation from the name;
6017 an exception here is that NOT actually means NAND. TARGET is an optional
6018 place for us to store the results; AFTER is true if this is the
6019 fetch_and_xxx form. IGNORE is true if we don't actually care about
6020 the result of the operation at all. */
6022 static rtx
6023 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6024 enum rtx_code code, bool after,
6025 rtx target, bool ignore)
6027 rtx val, mem;
6028 enum machine_mode old_mode;
6030 if (code == NOT && warn_sync_nand)
6032 tree fndecl = get_callee_fndecl (exp);
6033 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6035 static bool warned_f_a_n, warned_n_a_f;
6037 switch (fcode)
6039 case BUILT_IN_FETCH_AND_NAND_1:
6040 case BUILT_IN_FETCH_AND_NAND_2:
6041 case BUILT_IN_FETCH_AND_NAND_4:
6042 case BUILT_IN_FETCH_AND_NAND_8:
6043 case BUILT_IN_FETCH_AND_NAND_16:
6045 if (warned_f_a_n)
6046 break;
6048 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6049 inform (input_location,
6050 "%qD changed semantics in GCC 4.4", fndecl);
6051 warned_f_a_n = true;
6052 break;
6054 case BUILT_IN_NAND_AND_FETCH_1:
6055 case BUILT_IN_NAND_AND_FETCH_2:
6056 case BUILT_IN_NAND_AND_FETCH_4:
6057 case BUILT_IN_NAND_AND_FETCH_8:
6058 case BUILT_IN_NAND_AND_FETCH_16:
6060 if (warned_n_a_f)
6061 break;
6063 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6064 inform (input_location,
6065 "%qD changed semantics in GCC 4.4", fndecl);
6066 warned_n_a_f = true;
6067 break;
6069 default:
6070 gcc_unreachable ();
6074 /* Expand the operands. */
6075 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6077 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6078 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6079 of CONST_INTs, where we know the old_mode only from the call argument. */
6080 old_mode = GET_MODE (val);
6081 if (old_mode == VOIDmode)
6082 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6083 val = convert_modes (mode, old_mode, val, 1);
6085 if (ignore)
6086 return expand_sync_operation (mem, val, code);
6087 else
6088 return expand_sync_fetch_operation (mem, val, code, after, target);
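/* Illustrative examples, not part of the original sources:
   __sync_fetch_and_add (&x, n) arrives here with CODE == PLUS and
   AFTER == false, __sync_add_and_fetch (&x, n) with AFTER == true, and
   the __sync_*_nand_* variants with CODE == NOT, which is what triggers
   the -Wsync-nand (warn_sync_nand) note above about the GCC 4.4
   semantics change.  */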
6091 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6092 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6093 true if this is the boolean form. TARGET is a place for us to store the
6094 results; this is NOT optional if IS_BOOL is true. */
6096 static rtx
6097 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6098 bool is_bool, rtx target)
6100 rtx old_val, new_val, mem;
6101 enum machine_mode old_mode;
6103 /* Expand the operands. */
6104 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6107 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6108 mode, EXPAND_NORMAL);
6109 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6110 of CONST_INTs, where we know the old_mode only from the call argument. */
6111 old_mode = GET_MODE (old_val);
6112 if (old_mode == VOIDmode)
6113 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6114 old_val = convert_modes (mode, old_mode, old_val, 1);
6116 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6117 mode, EXPAND_NORMAL);
6118 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6119 of CONST_INTs, where we know the old_mode only from the call argument. */
6120 old_mode = GET_MODE (new_val);
6121 if (old_mode == VOIDmode)
6122 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6123 new_val = convert_modes (mode, old_mode, new_val, 1);
6125 if (is_bool)
6126 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6127 else
6128 return expand_val_compare_and_swap (mem, old_val, new_val, target);
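/* Illustrative examples, not part of the original sources:
   __sync_val_compare_and_swap (&x, oldv, newv) comes through here with
   IS_BOOL false and returns the prior value of x, while
   __sync_bool_compare_and_swap returns whether the store was performed,
   in which case the caller must supply TARGET.  */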
6131 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6132 general form is actually an atomic exchange, and some targets only
6133 support a reduced form with the second argument being a constant 1.
6134 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6135 the results. */
6137 static rtx
6138 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6139 rtx target)
6141 rtx val, mem;
6142 enum machine_mode old_mode;
6144 /* Expand the operands. */
6145 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6146 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6147 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6148 of CONST_INTs, where we know the old_mode only from the call argument. */
6149 old_mode = GET_MODE (val);
6150 if (old_mode == VOIDmode)
6151 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6152 val = convert_modes (mode, old_mode, val, 1);
6154 return expand_sync_lock_test_and_set (mem, val, target);
6157 /* Expand the __sync_synchronize intrinsic. */
6159 static void
6160 expand_builtin_synchronize (void)
6162 tree x;
6164 #ifdef HAVE_memory_barrier
6165 if (HAVE_memory_barrier)
6167 emit_insn (gen_memory_barrier ());
6168 return;
6170 #endif
6172 if (synchronize_libfunc != NULL_RTX)
6174 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6175 return;
6178 /* If no explicit memory barrier instruction is available, create an
6179 empty asm stmt with a memory clobber. */
6180 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6181 tree_cons (NULL, build_string (6, "memory"), NULL));
6182 ASM_VOLATILE_P (x) = 1;
6183 expand_asm_expr (x);
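/* Illustrative note, not part of the original sources: on a target with
   neither a memory_barrier pattern nor a synchronize libfunc,
   __sync_synchronize () degrades to the equivalent of

     __asm__ __volatile__ ("" : : : "memory");

   i.e. a compiler-level barrier only, which is what the ASM_EXPR built
   above expresses.  */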
6186 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6188 static void
6189 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6191 enum insn_code icode;
6192 rtx mem, insn;
6193 rtx val = const0_rtx;
6195 /* Expand the operands. */
6196 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6198 /* If there is an explicit operation in the md file, use it. */
6199 icode = sync_lock_release[mode];
6200 if (icode != CODE_FOR_nothing)
6202 if (!insn_data[icode].operand[1].predicate (val, mode))
6203 val = force_reg (mode, val);
6205 insn = GEN_FCN (icode) (mem, val);
6206 if (insn)
6208 emit_insn (insn);
6209 return;
6213 /* Otherwise we can implement this operation by emitting a barrier
6214 followed by a store of zero. */
6215 expand_builtin_synchronize ();
6216 emit_move_insn (mem, val);
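/* Illustrative example, not part of the original sources:
   __sync_lock_release (&lock) stores 0 into *lock, using the target's
   sync_lock_release pattern when one exists and otherwise falling back
   to __sync_synchronize () followed by a plain store of zero, as above.  */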
6219 /* Expand an expression EXP that calls a built-in function,
6220 with result going to TARGET if that's convenient
6221 (and in mode MODE if that's convenient).
6222 SUBTARGET may be used as the target for computing one of EXP's operands.
6223 IGNORE is nonzero if the value is to be ignored. */
6225 rtx
6226 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6227 int ignore)
6229 tree fndecl = get_callee_fndecl (exp);
6230 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6231 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6233 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6234 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6236 /* When not optimizing, generate calls to library functions for a certain
6237 set of builtins. */
6238 if (!optimize
6239 && !called_as_built_in (fndecl)
6240 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6241 && fcode != BUILT_IN_ALLOCA
6242 && fcode != BUILT_IN_FREE)
6243 return expand_call (exp, target, ignore);
6245 /* The built-in function expanders test for target == const0_rtx
6246 to determine whether the function's result will be ignored. */
6247 if (ignore)
6248 target = const0_rtx;
6250 /* If the result of a pure or const built-in function is ignored, and
6251 none of its arguments are volatile, we can avoid expanding the
6252 built-in call and just evaluate the arguments for side-effects. */
6253 if (target == const0_rtx
6254 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6256 bool volatilep = false;
6257 tree arg;
6258 call_expr_arg_iterator iter;
6260 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6261 if (TREE_THIS_VOLATILE (arg))
6263 volatilep = true;
6264 break;
6267 if (! volatilep)
6269 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6270 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6271 return const0_rtx;
6275 switch (fcode)
6277 CASE_FLT_FN (BUILT_IN_FABS):
6278 target = expand_builtin_fabs (exp, target, subtarget);
6279 if (target)
6280 return target;
6281 break;
6283 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6284 target = expand_builtin_copysign (exp, target, subtarget);
6285 if (target)
6286 return target;
6287 break;
6289 /* Just do a normal library call if we were unable to fold
6290 the values. */
6291 CASE_FLT_FN (BUILT_IN_CABS):
6292 break;
6294 CASE_FLT_FN (BUILT_IN_EXP):
6295 CASE_FLT_FN (BUILT_IN_EXP10):
6296 CASE_FLT_FN (BUILT_IN_POW10):
6297 CASE_FLT_FN (BUILT_IN_EXP2):
6298 CASE_FLT_FN (BUILT_IN_EXPM1):
6299 CASE_FLT_FN (BUILT_IN_LOGB):
6300 CASE_FLT_FN (BUILT_IN_LOG):
6301 CASE_FLT_FN (BUILT_IN_LOG10):
6302 CASE_FLT_FN (BUILT_IN_LOG2):
6303 CASE_FLT_FN (BUILT_IN_LOG1P):
6304 CASE_FLT_FN (BUILT_IN_TAN):
6305 CASE_FLT_FN (BUILT_IN_ASIN):
6306 CASE_FLT_FN (BUILT_IN_ACOS):
6307 CASE_FLT_FN (BUILT_IN_ATAN):
6308 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6309 because of possible accuracy problems. */
6310 if (! flag_unsafe_math_optimizations)
6311 break;
6312 CASE_FLT_FN (BUILT_IN_SQRT):
6313 CASE_FLT_FN (BUILT_IN_FLOOR):
6314 CASE_FLT_FN (BUILT_IN_CEIL):
6315 CASE_FLT_FN (BUILT_IN_TRUNC):
6316 CASE_FLT_FN (BUILT_IN_ROUND):
6317 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6318 CASE_FLT_FN (BUILT_IN_RINT):
6319 target = expand_builtin_mathfn (exp, target, subtarget);
6320 if (target)
6321 return target;
6322 break;
6324 CASE_FLT_FN (BUILT_IN_ILOGB):
6325 if (! flag_unsafe_math_optimizations)
6326 break;
6327 CASE_FLT_FN (BUILT_IN_ISINF):
6328 CASE_FLT_FN (BUILT_IN_FINITE):
6329 case BUILT_IN_ISFINITE:
6330 case BUILT_IN_ISNORMAL:
6331 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6332 if (target)
6333 return target;
6334 break;
6336 CASE_FLT_FN (BUILT_IN_LCEIL):
6337 CASE_FLT_FN (BUILT_IN_LLCEIL):
6338 CASE_FLT_FN (BUILT_IN_LFLOOR):
6339 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6340 target = expand_builtin_int_roundingfn (exp, target);
6341 if (target)
6342 return target;
6343 break;
6345 CASE_FLT_FN (BUILT_IN_LRINT):
6346 CASE_FLT_FN (BUILT_IN_LLRINT):
6347 CASE_FLT_FN (BUILT_IN_LROUND):
6348 CASE_FLT_FN (BUILT_IN_LLROUND):
6349 target = expand_builtin_int_roundingfn_2 (exp, target);
6350 if (target)
6351 return target;
6352 break;
6354 CASE_FLT_FN (BUILT_IN_POW):
6355 target = expand_builtin_pow (exp, target, subtarget);
6356 if (target)
6357 return target;
6358 break;
6360 CASE_FLT_FN (BUILT_IN_POWI):
6361 target = expand_builtin_powi (exp, target, subtarget);
6362 if (target)
6363 return target;
6364 break;
6366 CASE_FLT_FN (BUILT_IN_ATAN2):
6367 CASE_FLT_FN (BUILT_IN_LDEXP):
6368 CASE_FLT_FN (BUILT_IN_SCALB):
6369 CASE_FLT_FN (BUILT_IN_SCALBN):
6370 CASE_FLT_FN (BUILT_IN_SCALBLN):
6371 if (! flag_unsafe_math_optimizations)
6372 break;
6374 CASE_FLT_FN (BUILT_IN_FMOD):
6375 CASE_FLT_FN (BUILT_IN_REMAINDER):
6376 CASE_FLT_FN (BUILT_IN_DREM):
6377 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6378 if (target)
6379 return target;
6380 break;
6382 CASE_FLT_FN (BUILT_IN_CEXPI):
6383 target = expand_builtin_cexpi (exp, target, subtarget);
6384 gcc_assert (target);
6385 return target;
6387 CASE_FLT_FN (BUILT_IN_SIN):
6388 CASE_FLT_FN (BUILT_IN_COS):
6389 if (! flag_unsafe_math_optimizations)
6390 break;
6391 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6392 if (target)
6393 return target;
6394 break;
6396 CASE_FLT_FN (BUILT_IN_SINCOS):
6397 if (! flag_unsafe_math_optimizations)
6398 break;
6399 target = expand_builtin_sincos (exp);
6400 if (target)
6401 return target;
6402 break;
6404 case BUILT_IN_APPLY_ARGS:
6405 return expand_builtin_apply_args ();
6407 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6408 FUNCTION with a copy of the parameters described by
6409 ARGUMENTS, and ARGSIZE. It returns a block of memory
6410 allocated on the stack into which is stored all the registers
6411 that might possibly be used for returning the result of a
6412 function. ARGUMENTS is the value returned by
6413 __builtin_apply_args. ARGSIZE is the number of bytes of
6414 arguments that must be copied. ??? How should this value be
6415 computed? We'll also need a safe worst case value for varargs
6416 functions. */
6417 case BUILT_IN_APPLY:
6418 if (!validate_arglist (exp, POINTER_TYPE,
6419 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6420 && !validate_arglist (exp, REFERENCE_TYPE,
6421 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6422 return const0_rtx;
6423 else
6425 rtx ops[3];
6427 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6428 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6429 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6431 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6434 /* __builtin_return (RESULT) causes the function to return the
6435 value described by RESULT. RESULT is address of the block of
6436 memory returned by __builtin_apply. */
6437 case BUILT_IN_RETURN:
6438 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6439 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6440 return const0_rtx;
6442 case BUILT_IN_SAVEREGS:
6443 return expand_builtin_saveregs ();
6445 case BUILT_IN_ARGS_INFO:
6446 return expand_builtin_args_info (exp);
6448 case BUILT_IN_VA_ARG_PACK:
6449 /* All valid uses of __builtin_va_arg_pack () are removed during
6450 inlining. */
6451 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6452 return const0_rtx;
6454 case BUILT_IN_VA_ARG_PACK_LEN:
6455 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6456 inlining. */
6457 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6458 return const0_rtx;
6460 /* Return the address of the first anonymous stack arg. */
6461 case BUILT_IN_NEXT_ARG:
6462 if (fold_builtin_next_arg (exp, false))
6463 return const0_rtx;
6464 return expand_builtin_next_arg ();
6466 case BUILT_IN_CLEAR_CACHE:
6467 target = expand_builtin___clear_cache (exp);
6468 if (target)
6469 return target;
6470 break;
6472 case BUILT_IN_CLASSIFY_TYPE:
6473 return expand_builtin_classify_type (exp);
6475 case BUILT_IN_CONSTANT_P:
6476 return const0_rtx;
6478 case BUILT_IN_FRAME_ADDRESS:
6479 case BUILT_IN_RETURN_ADDRESS:
6480 return expand_builtin_frame_address (fndecl, exp);
6482 /* Returns the address of the area where the structure is returned.
6483 0 otherwise. */
6484 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6485 if (call_expr_nargs (exp) != 0
6486 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6487 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6488 return const0_rtx;
6489 else
6490 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6492 case BUILT_IN_ALLOCA:
6493 target = expand_builtin_alloca (exp, target);
6494 if (target)
6495 return target;
6496 break;
6498 case BUILT_IN_STACK_SAVE:
6499 return expand_stack_save ();
6501 case BUILT_IN_STACK_RESTORE:
6502 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6503 return const0_rtx;
6505 case BUILT_IN_BSWAP32:
6506 case BUILT_IN_BSWAP64:
6507 target = expand_builtin_bswap (exp, target, subtarget);
6509 if (target)
6510 return target;
6511 break;
6513 CASE_INT_FN (BUILT_IN_FFS):
6514 case BUILT_IN_FFSIMAX:
6515 target = expand_builtin_unop (target_mode, exp, target,
6516 subtarget, ffs_optab);
6517 if (target)
6518 return target;
6519 break;
6521 CASE_INT_FN (BUILT_IN_CLZ):
6522 case BUILT_IN_CLZIMAX:
6523 target = expand_builtin_unop (target_mode, exp, target,
6524 subtarget, clz_optab);
6525 if (target)
6526 return target;
6527 break;
6529 CASE_INT_FN (BUILT_IN_CTZ):
6530 case BUILT_IN_CTZIMAX:
6531 target = expand_builtin_unop (target_mode, exp, target,
6532 subtarget, ctz_optab);
6533 if (target)
6534 return target;
6535 break;
6537 CASE_INT_FN (BUILT_IN_POPCOUNT):
6538 case BUILT_IN_POPCOUNTIMAX:
6539 target = expand_builtin_unop (target_mode, exp, target,
6540 subtarget, popcount_optab);
6541 if (target)
6542 return target;
6543 break;
6545 CASE_INT_FN (BUILT_IN_PARITY):
6546 case BUILT_IN_PARITYIMAX:
6547 target = expand_builtin_unop (target_mode, exp, target,
6548 subtarget, parity_optab);
6549 if (target)
6550 return target;
6551 break;
6553 case BUILT_IN_STRLEN:
6554 target = expand_builtin_strlen (exp, target, target_mode);
6555 if (target)
6556 return target;
6557 break;
6559 case BUILT_IN_STRCPY:
6560 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6561 if (target)
6562 return target;
6563 break;
6565 case BUILT_IN_STRNCPY:
6566 target = expand_builtin_strncpy (exp, target, mode);
6567 if (target)
6568 return target;
6569 break;
6571 case BUILT_IN_STPCPY:
6572 target = expand_builtin_stpcpy (exp, target, mode);
6573 if (target)
6574 return target;
6575 break;
6577 case BUILT_IN_STRCAT:
6578 target = expand_builtin_strcat (fndecl, exp, target, mode);
6579 if (target)
6580 return target;
6581 break;
6583 case BUILT_IN_STRNCAT:
6584 target = expand_builtin_strncat (exp, target, mode);
6585 if (target)
6586 return target;
6587 break;
6589 case BUILT_IN_STRSPN:
6590 target = expand_builtin_strspn (exp, target, mode);
6591 if (target)
6592 return target;
6593 break;
6595 case BUILT_IN_STRCSPN:
6596 target = expand_builtin_strcspn (exp, target, mode);
6597 if (target)
6598 return target;
6599 break;
6601 case BUILT_IN_STRSTR:
6602 target = expand_builtin_strstr (exp, target, mode);
6603 if (target)
6604 return target;
6605 break;
6607 case BUILT_IN_STRPBRK:
6608 target = expand_builtin_strpbrk (exp, target, mode);
6609 if (target)
6610 return target;
6611 break;
6613 case BUILT_IN_INDEX:
6614 case BUILT_IN_STRCHR:
6615 target = expand_builtin_strchr (exp, target, mode);
6616 if (target)
6617 return target;
6618 break;
6620 case BUILT_IN_RINDEX:
6621 case BUILT_IN_STRRCHR:
6622 target = expand_builtin_strrchr (exp, target, mode);
6623 if (target)
6624 return target;
6625 break;
6627 case BUILT_IN_MEMCPY:
6628 target = expand_builtin_memcpy (exp, target, mode);
6629 if (target)
6630 return target;
6631 break;
6633 case BUILT_IN_MEMPCPY:
6634 target = expand_builtin_mempcpy (exp, target, mode);
6635 if (target)
6636 return target;
6637 break;
6639 case BUILT_IN_MEMMOVE:
6640 target = expand_builtin_memmove (exp, target, mode, ignore);
6641 if (target)
6642 return target;
6643 break;
6645 case BUILT_IN_BCOPY:
6646 target = expand_builtin_bcopy (exp, ignore);
6647 if (target)
6648 return target;
6649 break;
6651 case BUILT_IN_MEMSET:
6652 target = expand_builtin_memset (exp, target, mode);
6653 if (target)
6654 return target;
6655 break;
6657 case BUILT_IN_BZERO:
6658 target = expand_builtin_bzero (exp);
6659 if (target)
6660 return target;
6661 break;
6663 case BUILT_IN_STRCMP:
6664 target = expand_builtin_strcmp (exp, target, mode);
6665 if (target)
6666 return target;
6667 break;
6669 case BUILT_IN_STRNCMP:
6670 target = expand_builtin_strncmp (exp, target, mode);
6671 if (target)
6672 return target;
6673 break;
6675 case BUILT_IN_MEMCHR:
6676 target = expand_builtin_memchr (exp, target, mode);
6677 if (target)
6678 return target;
6679 break;
6681 case BUILT_IN_BCMP:
6682 case BUILT_IN_MEMCMP:
6683 target = expand_builtin_memcmp (exp, target, mode);
6684 if (target)
6685 return target;
6686 break;
6688 case BUILT_IN_SETJMP:
6689 /* This should have been lowered to the builtins below. */
6690 gcc_unreachable ();
6692 case BUILT_IN_SETJMP_SETUP:
6693 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6694 and the receiver label. */
6695 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6697 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6698 VOIDmode, EXPAND_NORMAL);
6699 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6700 rtx label_r = label_rtx (label);
6702 /* This is copied from the handling of non-local gotos. */
6703 expand_builtin_setjmp_setup (buf_addr, label_r);
6704 nonlocal_goto_handler_labels
6705 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6706 nonlocal_goto_handler_labels);
6707 /* ??? Do not let expand_label treat us as such since we would
6708 not want to be both on the list of non-local labels and on
6709 the list of forced labels. */
6710 FORCED_LABEL (label) = 0;
6711 return const0_rtx;
6713 break;
6715 case BUILT_IN_SETJMP_DISPATCHER:
6716 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6717 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6719 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6720 rtx label_r = label_rtx (label);
6722 /* Remove the dispatcher label from the list of non-local labels
6723 since the receiver labels have been added to it above. */
6724 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6725 return const0_rtx;
6727 break;
6729 case BUILT_IN_SETJMP_RECEIVER:
6730 /* __builtin_setjmp_receiver is passed the receiver label. */
6731 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6733 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6734 rtx label_r = label_rtx (label);
6736 expand_builtin_setjmp_receiver (label_r);
6737 return const0_rtx;
6739 break;
6741 /* __builtin_longjmp is passed a pointer to an array of five words.
6742 It's similar to the C library longjmp function but works with
6743 __builtin_setjmp above. */
6744 case BUILT_IN_LONGJMP:
6745 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6747 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6748 VOIDmode, EXPAND_NORMAL);
6749 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6751 if (value != const1_rtx)
6753 error ("%<__builtin_longjmp%> second argument must be 1");
6754 return const0_rtx;
6757 expand_builtin_longjmp (buf_addr, value);
6758 return const0_rtx;
6760 break;
6762 case BUILT_IN_NONLOCAL_GOTO:
6763 target = expand_builtin_nonlocal_goto (exp);
6764 if (target)
6765 return target;
6766 break;
6768 /* This updates the setjmp buffer that is its argument with the value
6769 of the current stack pointer. */
6770 case BUILT_IN_UPDATE_SETJMP_BUF:
6771 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6773 rtx buf_addr
6774 = expand_normal (CALL_EXPR_ARG (exp, 0));
6776 expand_builtin_update_setjmp_buf (buf_addr);
6777 return const0_rtx;
6779 break;
6781 case BUILT_IN_TRAP:
6782 expand_builtin_trap ();
6783 return const0_rtx;
6785 case BUILT_IN_PRINTF:
6786 target = expand_builtin_printf (exp, target, mode, false);
6787 if (target)
6788 return target;
6789 break;
6791 case BUILT_IN_PRINTF_UNLOCKED:
6792 target = expand_builtin_printf (exp, target, mode, true);
6793 if (target)
6794 return target;
6795 break;
6797 case BUILT_IN_FPUTS:
6798 target = expand_builtin_fputs (exp, target, false);
6799 if (target)
6800 return target;
6801 break;
6802 case BUILT_IN_FPUTS_UNLOCKED:
6803 target = expand_builtin_fputs (exp, target, true);
6804 if (target)
6805 return target;
6806 break;
6808 case BUILT_IN_FPRINTF:
6809 target = expand_builtin_fprintf (exp, target, mode, false);
6810 if (target)
6811 return target;
6812 break;
6814 case BUILT_IN_FPRINTF_UNLOCKED:
6815 target = expand_builtin_fprintf (exp, target, mode, true);
6816 if (target)
6817 return target;
6818 break;
6820 case BUILT_IN_SPRINTF:
6821 target = expand_builtin_sprintf (exp, target, mode);
6822 if (target)
6823 return target;
6824 break;
6826 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6827 case BUILT_IN_SIGNBITD32:
6828 case BUILT_IN_SIGNBITD64:
6829 case BUILT_IN_SIGNBITD128:
6830 target = expand_builtin_signbit (exp, target);
6831 if (target)
6832 return target;
6833 break;
6835 /* Various hooks for the DWARF 2 __throw routine. */
6836 case BUILT_IN_UNWIND_INIT:
6837 expand_builtin_unwind_init ();
6838 return const0_rtx;
6839 case BUILT_IN_DWARF_CFA:
6840 return virtual_cfa_rtx;
6841 #ifdef DWARF2_UNWIND_INFO
6842 case BUILT_IN_DWARF_SP_COLUMN:
6843 return expand_builtin_dwarf_sp_column ();
6844 case BUILT_IN_INIT_DWARF_REG_SIZES:
6845 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6846 return const0_rtx;
6847 #endif
6848 case BUILT_IN_FROB_RETURN_ADDR:
6849 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6850 case BUILT_IN_EXTRACT_RETURN_ADDR:
6851 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6852 case BUILT_IN_EH_RETURN:
6853 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6854 CALL_EXPR_ARG (exp, 1));
6855 return const0_rtx;
6856 #ifdef EH_RETURN_DATA_REGNO
6857 case BUILT_IN_EH_RETURN_DATA_REGNO:
6858 return expand_builtin_eh_return_data_regno (exp);
6859 #endif
6860 case BUILT_IN_EXTEND_POINTER:
6861 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6863 case BUILT_IN_VA_START:
6864 return expand_builtin_va_start (exp);
6865 case BUILT_IN_VA_END:
6866 return expand_builtin_va_end (exp);
6867 case BUILT_IN_VA_COPY:
6868 return expand_builtin_va_copy (exp);
6869 case BUILT_IN_EXPECT:
6870 return expand_builtin_expect (exp, target);
6871 case BUILT_IN_PREFETCH:
6872 expand_builtin_prefetch (exp);
6873 return const0_rtx;
6875 case BUILT_IN_PROFILE_FUNC_ENTER:
6876 return expand_builtin_profile_func (false);
6877 case BUILT_IN_PROFILE_FUNC_EXIT:
6878 return expand_builtin_profile_func (true);
6880 case BUILT_IN_INIT_TRAMPOLINE:
6881 return expand_builtin_init_trampoline (exp);
6882 case BUILT_IN_ADJUST_TRAMPOLINE:
6883 return expand_builtin_adjust_trampoline (exp);
6885 case BUILT_IN_FORK:
6886 case BUILT_IN_EXECL:
6887 case BUILT_IN_EXECV:
6888 case BUILT_IN_EXECLP:
6889 case BUILT_IN_EXECLE:
6890 case BUILT_IN_EXECVP:
6891 case BUILT_IN_EXECVE:
6892 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6893 if (target)
6894 return target;
6895 break;
6897 case BUILT_IN_FETCH_AND_ADD_1:
6898 case BUILT_IN_FETCH_AND_ADD_2:
6899 case BUILT_IN_FETCH_AND_ADD_4:
6900 case BUILT_IN_FETCH_AND_ADD_8:
6901 case BUILT_IN_FETCH_AND_ADD_16:
6902 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6903 target = expand_builtin_sync_operation (mode, exp, PLUS,
6904 false, target, ignore);
6905 if (target)
6906 return target;
6907 break;
6909 case BUILT_IN_FETCH_AND_SUB_1:
6910 case BUILT_IN_FETCH_AND_SUB_2:
6911 case BUILT_IN_FETCH_AND_SUB_4:
6912 case BUILT_IN_FETCH_AND_SUB_8:
6913 case BUILT_IN_FETCH_AND_SUB_16:
6914 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6915 target = expand_builtin_sync_operation (mode, exp, MINUS,
6916 false, target, ignore);
6917 if (target)
6918 return target;
6919 break;
6921 case BUILT_IN_FETCH_AND_OR_1:
6922 case BUILT_IN_FETCH_AND_OR_2:
6923 case BUILT_IN_FETCH_AND_OR_4:
6924 case BUILT_IN_FETCH_AND_OR_8:
6925 case BUILT_IN_FETCH_AND_OR_16:
6926 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6927 target = expand_builtin_sync_operation (mode, exp, IOR,
6928 false, target, ignore);
6929 if (target)
6930 return target;
6931 break;
6933 case BUILT_IN_FETCH_AND_AND_1:
6934 case BUILT_IN_FETCH_AND_AND_2:
6935 case BUILT_IN_FETCH_AND_AND_4:
6936 case BUILT_IN_FETCH_AND_AND_8:
6937 case BUILT_IN_FETCH_AND_AND_16:
6938 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6939 target = expand_builtin_sync_operation (mode, exp, AND,
6940 false, target, ignore);
6941 if (target)
6942 return target;
6943 break;
6945 case BUILT_IN_FETCH_AND_XOR_1:
6946 case BUILT_IN_FETCH_AND_XOR_2:
6947 case BUILT_IN_FETCH_AND_XOR_4:
6948 case BUILT_IN_FETCH_AND_XOR_8:
6949 case BUILT_IN_FETCH_AND_XOR_16:
6950 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6951 target = expand_builtin_sync_operation (mode, exp, XOR,
6952 false, target, ignore);
6953 if (target)
6954 return target;
6955 break;
6957 case BUILT_IN_FETCH_AND_NAND_1:
6958 case BUILT_IN_FETCH_AND_NAND_2:
6959 case BUILT_IN_FETCH_AND_NAND_4:
6960 case BUILT_IN_FETCH_AND_NAND_8:
6961 case BUILT_IN_FETCH_AND_NAND_16:
6962 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6963 target = expand_builtin_sync_operation (mode, exp, NOT,
6964 false, target, ignore);
6965 if (target)
6966 return target;
6967 break;
6969 case BUILT_IN_ADD_AND_FETCH_1:
6970 case BUILT_IN_ADD_AND_FETCH_2:
6971 case BUILT_IN_ADD_AND_FETCH_4:
6972 case BUILT_IN_ADD_AND_FETCH_8:
6973 case BUILT_IN_ADD_AND_FETCH_16:
6974 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6975 target = expand_builtin_sync_operation (mode, exp, PLUS,
6976 true, target, ignore);
6977 if (target)
6978 return target;
6979 break;
6981 case BUILT_IN_SUB_AND_FETCH_1:
6982 case BUILT_IN_SUB_AND_FETCH_2:
6983 case BUILT_IN_SUB_AND_FETCH_4:
6984 case BUILT_IN_SUB_AND_FETCH_8:
6985 case BUILT_IN_SUB_AND_FETCH_16:
6986 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6987 target = expand_builtin_sync_operation (mode, exp, MINUS,
6988 true, target, ignore);
6989 if (target)
6990 return target;
6991 break;
6993 case BUILT_IN_OR_AND_FETCH_1:
6994 case BUILT_IN_OR_AND_FETCH_2:
6995 case BUILT_IN_OR_AND_FETCH_4:
6996 case BUILT_IN_OR_AND_FETCH_8:
6997 case BUILT_IN_OR_AND_FETCH_16:
6998 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6999 target = expand_builtin_sync_operation (mode, exp, IOR,
7000 true, target, ignore);
7001 if (target)
7002 return target;
7003 break;
7005 case BUILT_IN_AND_AND_FETCH_1:
7006 case BUILT_IN_AND_AND_FETCH_2:
7007 case BUILT_IN_AND_AND_FETCH_4:
7008 case BUILT_IN_AND_AND_FETCH_8:
7009 case BUILT_IN_AND_AND_FETCH_16:
7010 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7011 target = expand_builtin_sync_operation (mode, exp, AND,
7012 true, target, ignore);
7013 if (target)
7014 return target;
7015 break;
7017 case BUILT_IN_XOR_AND_FETCH_1:
7018 case BUILT_IN_XOR_AND_FETCH_2:
7019 case BUILT_IN_XOR_AND_FETCH_4:
7020 case BUILT_IN_XOR_AND_FETCH_8:
7021 case BUILT_IN_XOR_AND_FETCH_16:
7022 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7023 target = expand_builtin_sync_operation (mode, exp, XOR,
7024 true, target, ignore);
7025 if (target)
7026 return target;
7027 break;
7029 case BUILT_IN_NAND_AND_FETCH_1:
7030 case BUILT_IN_NAND_AND_FETCH_2:
7031 case BUILT_IN_NAND_AND_FETCH_4:
7032 case BUILT_IN_NAND_AND_FETCH_8:
7033 case BUILT_IN_NAND_AND_FETCH_16:
7034 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7035 target = expand_builtin_sync_operation (mode, exp, NOT,
7036 true, target, ignore);
7037 if (target)
7038 return target;
7039 break;
7041 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7042 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7043 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7044 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7045 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7046 if (mode == VOIDmode)
7047 mode = TYPE_MODE (boolean_type_node);
7048 if (!target || !register_operand (target, mode))
7049 target = gen_reg_rtx (mode);
7051 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7052 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7053 if (target)
7054 return target;
7055 break;
7057 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7058 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7059 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7060 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7061 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7062 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7063 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7064 if (target)
7065 return target;
7066 break;
7068 case BUILT_IN_LOCK_TEST_AND_SET_1:
7069 case BUILT_IN_LOCK_TEST_AND_SET_2:
7070 case BUILT_IN_LOCK_TEST_AND_SET_4:
7071 case BUILT_IN_LOCK_TEST_AND_SET_8:
7072 case BUILT_IN_LOCK_TEST_AND_SET_16:
7073 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7074 target = expand_builtin_lock_test_and_set (mode, exp, target);
7075 if (target)
7076 return target;
7077 break;
7079 case BUILT_IN_LOCK_RELEASE_1:
7080 case BUILT_IN_LOCK_RELEASE_2:
7081 case BUILT_IN_LOCK_RELEASE_4:
7082 case BUILT_IN_LOCK_RELEASE_8:
7083 case BUILT_IN_LOCK_RELEASE_16:
7084 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7085 expand_builtin_lock_release (mode, exp);
7086 return const0_rtx;
7088 case BUILT_IN_SYNCHRONIZE:
7089 expand_builtin_synchronize ();
7090 return const0_rtx;
7092 case BUILT_IN_OBJECT_SIZE:
7093 return expand_builtin_object_size (exp);
7095 case BUILT_IN_MEMCPY_CHK:
7096 case BUILT_IN_MEMPCPY_CHK:
7097 case BUILT_IN_MEMMOVE_CHK:
7098 case BUILT_IN_MEMSET_CHK:
7099 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7100 if (target)
7101 return target;
7102 break;
7104 case BUILT_IN_STRCPY_CHK:
7105 case BUILT_IN_STPCPY_CHK:
7106 case BUILT_IN_STRNCPY_CHK:
7107 case BUILT_IN_STRCAT_CHK:
7108 case BUILT_IN_STRNCAT_CHK:
7109 case BUILT_IN_SNPRINTF_CHK:
7110 case BUILT_IN_VSNPRINTF_CHK:
7111 maybe_emit_chk_warning (exp, fcode);
7112 break;
7114 case BUILT_IN_SPRINTF_CHK:
7115 case BUILT_IN_VSPRINTF_CHK:
7116 maybe_emit_sprintf_chk_warning (exp, fcode);
7117 break;
7119 case BUILT_IN_FREE:
7120 maybe_emit_free_warning (exp);
7121 break;
7123 default: /* just do library call, if unknown builtin */
7124 break;
7127 /* The switch statement above can drop through to cause the function
7128 to be called normally. */
7129 return expand_call (exp, target, ignore);
7132 /* Determine whether a tree node represents a call to a built-in
7133 function. If the tree T is a call to a built-in function with
7134 the right number of arguments of the appropriate types, return
7135 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7136 Otherwise the return value is END_BUILTINS. */
7138 enum built_in_function
7139 builtin_mathfn_code (const_tree t)
7141 const_tree fndecl, arg, parmlist;
7142 const_tree argtype, parmtype;
7143 const_call_expr_arg_iterator iter;
7145 if (TREE_CODE (t) != CALL_EXPR
7146 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7147 return END_BUILTINS;
7149 fndecl = get_callee_fndecl (t);
7150 if (fndecl == NULL_TREE
7151 || TREE_CODE (fndecl) != FUNCTION_DECL
7152 || ! DECL_BUILT_IN (fndecl)
7153 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7154 return END_BUILTINS;
7156 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7157 init_const_call_expr_arg_iterator (t, &iter);
7158 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7160 /* If a function doesn't take a variable number of arguments,
7161 the last element in the list will have type `void'. */
7162 parmtype = TREE_VALUE (parmlist);
7163 if (VOID_TYPE_P (parmtype))
7165 if (more_const_call_expr_args_p (&iter))
7166 return END_BUILTINS;
7167 return DECL_FUNCTION_CODE (fndecl);
7170 if (! more_const_call_expr_args_p (&iter))
7171 return END_BUILTINS;
7173 arg = next_const_call_expr_arg (&iter);
7174 argtype = TREE_TYPE (arg);
7176 if (SCALAR_FLOAT_TYPE_P (parmtype))
7178 if (! SCALAR_FLOAT_TYPE_P (argtype))
7179 return END_BUILTINS;
7181 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7183 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7184 return END_BUILTINS;
7186 else if (POINTER_TYPE_P (parmtype))
7188 if (! POINTER_TYPE_P (argtype))
7189 return END_BUILTINS;
7191 else if (INTEGRAL_TYPE_P (parmtype))
7193 if (! INTEGRAL_TYPE_P (argtype))
7194 return END_BUILTINS;
7196 else
7197 return END_BUILTINS;
7200 /* Variable-length argument list. */
7201 return DECL_FUNCTION_CODE (fndecl);
7204 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7205 evaluate to a constant. */
7207 static tree
7208 fold_builtin_constant_p (tree arg)
7210 /* We return 1 for a numeric type that's known to be a constant
7211 value at compile-time or for an aggregate type that's a
7212 literal constant. */
7213 STRIP_NOPS (arg);
7215 /* If we know this is a constant, return the constant one. */
7216 if (CONSTANT_CLASS_P (arg)
7217 || (TREE_CODE (arg) == CONSTRUCTOR
7218 && TREE_CONSTANT (arg)))
7219 return integer_one_node;
7220 if (TREE_CODE (arg) == ADDR_EXPR)
7222 tree op = TREE_OPERAND (arg, 0);
7223 if (TREE_CODE (op) == STRING_CST
7224 || (TREE_CODE (op) == ARRAY_REF
7225 && integer_zerop (TREE_OPERAND (op, 1))
7226 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7227 return integer_one_node;
7230 /* If this expression has side effects, show we don't know it to be a
7231 constant. Likewise if it's a pointer or aggregate type, since in
7232 those cases we only want literals, as those are only optimized
7233 when generating RTL, not later.
7234 And finally, if we are compiling an initializer, not code, we
7235 need to return a definite result now; there's not going to be any
7236 more optimization done. */
7237 if (TREE_SIDE_EFFECTS (arg)
7238 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7239 || POINTER_TYPE_P (TREE_TYPE (arg))
7240 || cfun == 0
7241 || folding_initializer)
7242 return integer_zero_node;
7244 return NULL_TREE;
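/* Illustrative examples (a sketch only, assuming a plain C front end):

     __builtin_constant_p (42)     folds to 1  (a CONSTANT_CLASS_P node)
     __builtin_constant_p ("abc")  folds to 1  (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (ptr)    folds to 0  (pointer type; only literals
                                                are accepted here)
     __builtin_constant_p (i)      stays unfolded (NULL_TREE), so a later
                                   pass may still prove it constant.  */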
7247 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7248 return it as a truthvalue. */
7250 static tree
7251 build_builtin_expect_predicate (tree pred, tree expected)
7253 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7255 fn = built_in_decls[BUILT_IN_EXPECT];
7256 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7257 ret_type = TREE_TYPE (TREE_TYPE (fn));
7258 pred_type = TREE_VALUE (arg_types);
7259 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7261 pred = fold_convert (pred_type, pred);
7262 expected = fold_convert (expected_type, expected);
7263 call_expr = build_call_expr (fn, 2, pred, expected);
7265 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7266 build_int_cst (ret_type, 0));
7269 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7270 NULL_TREE if no simplification is possible. */
7272 static tree
7273 fold_builtin_expect (tree arg0, tree arg1)
7275 tree inner, fndecl;
7276 enum tree_code code;
7278 /* If this is a builtin_expect within a builtin_expect, keep the
7279 inner one. See through a comparison against a constant. It
7280 might have been added to create a truthvalue. */
7281 inner = arg0;
7282 if (COMPARISON_CLASS_P (inner)
7283 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7284 inner = TREE_OPERAND (inner, 0);
7286 if (TREE_CODE (inner) == CALL_EXPR
7287 && (fndecl = get_callee_fndecl (inner))
7288 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7289 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7290 return arg0;
7292 /* Distribute the expected value over short-circuiting operators.
7293 See through the cast from truthvalue_type_node to long. */
7294 inner = arg0;
7295 while (TREE_CODE (inner) == NOP_EXPR
7296 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7297 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7298 inner = TREE_OPERAND (inner, 0);
7300 code = TREE_CODE (inner);
7301 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7303 tree op0 = TREE_OPERAND (inner, 0);
7304 tree op1 = TREE_OPERAND (inner, 1);
7306 op0 = build_builtin_expect_predicate (op0, arg1);
7307 op1 = build_builtin_expect_predicate (op1, arg1);
7308 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7310 return fold_convert (TREE_TYPE (arg0), inner);
7313 /* If the argument isn't invariant then there's nothing else we can do. */
7314 if (!TREE_CONSTANT (arg0))
7315 return NULL_TREE;
7317 /* If we expect that a comparison against the argument will fold to
7318 a constant, return the constant. In practice, this means a true
7319 constant or the address of a non-weak symbol. */
7320 inner = arg0;
7321 STRIP_NOPS (inner);
7322 if (TREE_CODE (inner) == ADDR_EXPR)
7326 inner = TREE_OPERAND (inner, 0);
7328 while (TREE_CODE (inner) == COMPONENT_REF
7329 || TREE_CODE (inner) == ARRAY_REF);
7330 if ((TREE_CODE (inner) == VAR_DECL
7331 || TREE_CODE (inner) == FUNCTION_DECL)
7332 && DECL_WEAK (inner))
7333 return NULL_TREE;
7336 /* Otherwise, ARG0 already has the proper type for the return value. */
7337 return arg0;
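/* Rough illustration of the distribution step above: for a call such as

     __builtin_expect (a && b, 1)

   the folder produces, approximately,

     (long) ((__builtin_expect (a, 1) != 0)
             && (__builtin_expect (b, 1) != 0))

   so each arm of the short-circuit carries its own hint.  The exact
   operand shapes depend on how the front end built the truthvalue.  */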
7340 /* Fold a call to __builtin_classify_type with argument ARG. */
7342 static tree
7343 fold_builtin_classify_type (tree arg)
7345 if (arg == 0)
7346 return build_int_cst (NULL_TREE, no_type_class);
7348 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7351 /* Fold a call to __builtin_strlen with argument ARG. */
7353 static tree
7354 fold_builtin_strlen (tree arg)
7356 if (!validate_arg (arg, POINTER_TYPE))
7357 return NULL_TREE;
7358 else
7360 tree len = c_strlen (arg, 0);
7362 if (len)
7364 /* Convert from the internal "sizetype" type to "size_t". */
7365 if (size_type_node)
7366 len = fold_convert (size_type_node, len);
7367 return len;
7370 return NULL_TREE;
7374 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7376 static tree
7377 fold_builtin_inf (tree type, int warn)
7379 REAL_VALUE_TYPE real;
7381 /* __builtin_inff is intended to be usable to define INFINITY on all
7382 targets. If an infinity is not available, INFINITY expands "to a
7383 positive constant of type float that overflows at translation
7384 time", footnote "In this case, using INFINITY will violate the
7385 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7386 Thus we pedwarn to ensure this constraint violation is
7387 diagnosed. */
7388 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7389 pedwarn (input_location, 0, "target format does not support infinity");
7391 real_inf (&real);
7392 return build_real (type, real);
7395 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7397 static tree
7398 fold_builtin_nan (tree arg, tree type, int quiet)
7400 REAL_VALUE_TYPE real;
7401 const char *str;
7403 if (!validate_arg (arg, POINTER_TYPE))
7404 return NULL_TREE;
7405 str = c_getstr (arg);
7406 if (!str)
7407 return NULL_TREE;
7409 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7410 return NULL_TREE;
7412 return build_real (type, real);
7415 /* Return true if the floating point expression T has an integer value.
7416 We also allow +Inf, -Inf and NaN to be considered integer values. */
7418 static bool
7419 integer_valued_real_p (tree t)
7421 switch (TREE_CODE (t))
7423 case FLOAT_EXPR:
7424 return true;
7426 case ABS_EXPR:
7427 case SAVE_EXPR:
7428 return integer_valued_real_p (TREE_OPERAND (t, 0));
7430 case COMPOUND_EXPR:
7431 case MODIFY_EXPR:
7432 case BIND_EXPR:
7433 return integer_valued_real_p (TREE_OPERAND (t, 1));
7435 case PLUS_EXPR:
7436 case MINUS_EXPR:
7437 case MULT_EXPR:
7438 case MIN_EXPR:
7439 case MAX_EXPR:
7440 return integer_valued_real_p (TREE_OPERAND (t, 0))
7441 && integer_valued_real_p (TREE_OPERAND (t, 1));
7443 case COND_EXPR:
7444 return integer_valued_real_p (TREE_OPERAND (t, 1))
7445 && integer_valued_real_p (TREE_OPERAND (t, 2));
7447 case REAL_CST:
7448 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7450 case NOP_EXPR:
7452 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7453 if (TREE_CODE (type) == INTEGER_TYPE)
7454 return true;
7455 if (TREE_CODE (type) == REAL_TYPE)
7456 return integer_valued_real_p (TREE_OPERAND (t, 0));
7457 break;
7460 case CALL_EXPR:
7461 switch (builtin_mathfn_code (t))
7463 CASE_FLT_FN (BUILT_IN_CEIL):
7464 CASE_FLT_FN (BUILT_IN_FLOOR):
7465 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7466 CASE_FLT_FN (BUILT_IN_RINT):
7467 CASE_FLT_FN (BUILT_IN_ROUND):
7468 CASE_FLT_FN (BUILT_IN_TRUNC):
7469 return true;
7471 CASE_FLT_FN (BUILT_IN_FMIN):
7472 CASE_FLT_FN (BUILT_IN_FMAX):
7473 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7474 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7476 default:
7477 break;
7479 break;
7481 default:
7482 break;
7484 return false;
7487 /* FNDECL is assumed to be a builtin where truncation can be propagated
7488 across (for instance floor((double)f) == (double)floorf (f)).
7489 Do the transformation for a call with argument ARG. */
7491 static tree
7492 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7494 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7496 if (!validate_arg (arg, REAL_TYPE))
7497 return NULL_TREE;
7499 /* Integer rounding functions are idempotent. */
7500 if (fcode == builtin_mathfn_code (arg))
7501 return arg;
7503 /* If the argument is already integer valued, and we don't need to worry
7504 about setting errno, there's no need to perform rounding. */
7505 if (! flag_errno_math && integer_valued_real_p (arg))
7506 return arg;
7508 if (optimize)
7510 tree arg0 = strip_float_extensions (arg);
7511 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7512 tree newtype = TREE_TYPE (arg0);
7513 tree decl;
7515 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7516 && (decl = mathfn_built_in (newtype, fcode)))
7517 return fold_convert (ftype,
7518 build_call_expr (decl, 1,
7519 fold_convert (newtype, arg0)));
7521 return NULL_TREE;
7524 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7525 the argument, for instance lround((double)f) -> lroundf (f).
7526 Do the transformation for a call with argument ARG. */
7528 static tree
7529 fold_fixed_mathfn (tree fndecl, tree arg)
7531 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7533 if (!validate_arg (arg, REAL_TYPE))
7534 return NULL_TREE;
7536 /* If the argument is already integer valued, and we don't need to worry
7537 about setting errno, there's no need to perform rounding. */
7538 if (! flag_errno_math && integer_valued_real_p (arg))
7539 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7541 if (optimize)
7543 tree ftype = TREE_TYPE (arg);
7544 tree arg0 = strip_float_extensions (arg);
7545 tree newtype = TREE_TYPE (arg0);
7546 tree decl;
7548 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7549 && (decl = mathfn_built_in (newtype, fcode)))
7550 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7553 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7554 sizeof (long long) == sizeof (long). */
7555 if (TYPE_PRECISION (long_long_integer_type_node)
7556 == TYPE_PRECISION (long_integer_type_node))
7558 tree newfn = NULL_TREE;
7559 switch (fcode)
7561 CASE_FLT_FN (BUILT_IN_LLCEIL):
7562 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7563 break;
7565 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7566 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7567 break;
7569 CASE_FLT_FN (BUILT_IN_LLROUND):
7570 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7571 break;
7573 CASE_FLT_FN (BUILT_IN_LLRINT):
7574 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7575 break;
7577 default:
7578 break;
7581 if (newfn)
7583 tree newcall = build_call_expr (newfn, 1, arg);
7584 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7588 return NULL_TREE;
7591 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7592 return type. Return NULL_TREE if no simplification can be made. */
7594 static tree
7595 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7597 tree res;
7599 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7600 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7601 return NULL_TREE;
7603 /* Calculate the result when the argument is a constant. */
7604 if (TREE_CODE (arg) == COMPLEX_CST
7605 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7606 type, mpfr_hypot)))
7607 return res;
7609 if (TREE_CODE (arg) == COMPLEX_EXPR)
7611 tree real = TREE_OPERAND (arg, 0);
7612 tree imag = TREE_OPERAND (arg, 1);
7614 /* If either part is zero, cabs is fabs of the other. */
7615 if (real_zerop (real))
7616 return fold_build1 (ABS_EXPR, type, imag);
7617 if (real_zerop (imag))
7618 return fold_build1 (ABS_EXPR, type, real);
7620 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7621 if (flag_unsafe_math_optimizations
7622 && operand_equal_p (real, imag, OEP_PURE_SAME))
7624 const REAL_VALUE_TYPE sqrt2_trunc
7625 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7626 STRIP_NOPS (real);
7627 return fold_build2 (MULT_EXPR, type,
7628 fold_build1 (ABS_EXPR, type, real),
7629 build_real (type, sqrt2_trunc));
7633 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7634 if (TREE_CODE (arg) == NEGATE_EXPR
7635 || TREE_CODE (arg) == CONJ_EXPR)
7636 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7638 /* Don't do this when optimizing for size. */
7639 if (flag_unsafe_math_optimizations
7640 && optimize && optimize_function_for_speed_p (cfun))
7642 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7644 if (sqrtfn != NULL_TREE)
7646 tree rpart, ipart, result;
7648 arg = builtin_save_expr (arg);
7650 rpart = fold_build1 (REALPART_EXPR, type, arg);
7651 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7653 rpart = builtin_save_expr (rpart);
7654 ipart = builtin_save_expr (ipart);
7656 result = fold_build2 (PLUS_EXPR, type,
7657 fold_build2 (MULT_EXPR, type,
7658 rpart, rpart),
7659 fold_build2 (MULT_EXPR, type,
7660 ipart, ipart));
7662 return build_call_expr (sqrtfn, 1, result);
7666 return NULL_TREE;
7669 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7670 Return NULL_TREE if no simplification can be made. */
7672 static tree
7673 fold_builtin_sqrt (tree arg, tree type)
7676 enum built_in_function fcode;
7677 tree res;
7679 if (!validate_arg (arg, REAL_TYPE))
7680 return NULL_TREE;
7682 /* Calculate the result when the argument is a constant. */
7683 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7684 return res;
7686 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7687 fcode = builtin_mathfn_code (arg);
7688 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7690 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7691 arg = fold_build2 (MULT_EXPR, type,
7692 CALL_EXPR_ARG (arg, 0),
7693 build_real (type, dconsthalf));
7694 return build_call_expr (expfn, 1, arg);
7697 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7698 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7700 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7702 if (powfn)
7704 tree arg0 = CALL_EXPR_ARG (arg, 0);
7705 tree tree_root;
7706 /* The inner root was either sqrt or cbrt. */
7707 /* This was a conditional expression but it triggered a bug
7708 in Sun C 5.5. */
7709 REAL_VALUE_TYPE dconstroot;
7710 if (BUILTIN_SQRT_P (fcode))
7711 dconstroot = dconsthalf;
7712 else
7713 dconstroot = dconst_third ();
7715 /* Adjust for the outer root. */
7716 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7717 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7718 tree_root = build_real (type, dconstroot);
7719 return build_call_expr (powfn, 2, arg0, tree_root);
7723 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7724 if (flag_unsafe_math_optimizations
7725 && (fcode == BUILT_IN_POW
7726 || fcode == BUILT_IN_POWF
7727 || fcode == BUILT_IN_POWL))
7729 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7730 tree arg0 = CALL_EXPR_ARG (arg, 0);
7731 tree arg1 = CALL_EXPR_ARG (arg, 1);
7732 tree narg1;
7733 if (!tree_expr_nonnegative_p (arg0))
7734 arg0 = build1 (ABS_EXPR, type, arg0);
7735 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7736 build_real (type, dconsthalf));
7737 return build_call_expr (powfn, 2, arg0, narg1);
7740 return NULL_TREE;
7743 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7744 Return NULL_TREE if no simplification can be made. */
7746 static tree
7747 fold_builtin_cbrt (tree arg, tree type)
7749 const enum built_in_function fcode = builtin_mathfn_code (arg);
7750 tree res;
7752 if (!validate_arg (arg, REAL_TYPE))
7753 return NULL_TREE;
7755 /* Calculate the result when the argument is a constant. */
7756 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7757 return res;
7759 if (flag_unsafe_math_optimizations)
7761 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7762 if (BUILTIN_EXPONENT_P (fcode))
7764 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7765 const REAL_VALUE_TYPE third_trunc =
7766 real_value_truncate (TYPE_MODE (type), dconst_third ());
7767 arg = fold_build2 (MULT_EXPR, type,
7768 CALL_EXPR_ARG (arg, 0),
7769 build_real (type, third_trunc));
7770 return build_call_expr (expfn, 1, arg);
7773 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7774 if (BUILTIN_SQRT_P (fcode))
7776 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7778 if (powfn)
7780 tree arg0 = CALL_EXPR_ARG (arg, 0);
7781 tree tree_root;
7782 REAL_VALUE_TYPE dconstroot = dconst_third ();
7784 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7785 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7786 tree_root = build_real (type, dconstroot);
7787 return build_call_expr (powfn, 2, arg0, tree_root);
7791 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7792 if (BUILTIN_CBRT_P (fcode))
7794 tree arg0 = CALL_EXPR_ARG (arg, 0);
7795 if (tree_expr_nonnegative_p (arg0))
7797 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7799 if (powfn)
7801 tree tree_root;
7802 REAL_VALUE_TYPE dconstroot;
7804 real_arithmetic (&dconstroot, MULT_EXPR,
7805 dconst_third_ptr (), dconst_third_ptr ());
7806 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7807 tree_root = build_real (type, dconstroot);
7808 return build_call_expr (powfn, 2, arg0, tree_root);
7813 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7814 if (fcode == BUILT_IN_POW
7815 || fcode == BUILT_IN_POWF
7816 || fcode == BUILT_IN_POWL)
7818 tree arg00 = CALL_EXPR_ARG (arg, 0);
7819 tree arg01 = CALL_EXPR_ARG (arg, 1);
7820 if (tree_expr_nonnegative_p (arg00))
7822 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7823 const REAL_VALUE_TYPE dconstroot
7824 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7825 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7826 build_real (type, dconstroot));
7827 return build_call_expr (powfn, 2, arg00, narg01);
7831 return NULL_TREE;
7834 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7835 TYPE is the type of the return value. Return NULL_TREE if no
7836 simplification can be made. */
7838 static tree
7839 fold_builtin_cos (tree arg, tree type, tree fndecl)
7841 tree res, narg;
7843 if (!validate_arg (arg, REAL_TYPE))
7844 return NULL_TREE;
7846 /* Calculate the result when the argument is a constant. */
7847 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7848 return res;
7850 /* Optimize cos(-x) into cos (x). */
7851 if ((narg = fold_strip_sign_ops (arg)))
7852 return build_call_expr (fndecl, 1, narg);
7854 return NULL_TREE;
7857 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7858 Return NULL_TREE if no simplification can be made. */
7860 static tree
7861 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7863 if (validate_arg (arg, REAL_TYPE))
7865 tree res, narg;
7867 /* Calculate the result when the argument is a constant. */
7868 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7869 return res;
7871 /* Optimize cosh(-x) into cosh (x). */
7872 if ((narg = fold_strip_sign_ops (arg)))
7873 return build_call_expr (fndecl, 1, narg);
7876 return NULL_TREE;
7879 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7880 Return NULL_TREE if no simplification can be made. */
7882 static tree
7883 fold_builtin_tan (tree arg, tree type)
7885 enum built_in_function fcode;
7886 tree res;
7888 if (!validate_arg (arg, REAL_TYPE))
7889 return NULL_TREE;
7891 /* Calculate the result when the argument is a constant. */
7892 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7893 return res;
7895 /* Optimize tan(atan(x)) = x. */
7896 fcode = builtin_mathfn_code (arg);
7897 if (flag_unsafe_math_optimizations
7898 && (fcode == BUILT_IN_ATAN
7899 || fcode == BUILT_IN_ATANF
7900 || fcode == BUILT_IN_ATANL))
7901 return CALL_EXPR_ARG (arg, 0);
7903 return NULL_TREE;
7906 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7907 NULL_TREE if no simplification can be made. */
7909 static tree
7910 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7912 tree type;
7913 tree res, fn, call;
7915 if (!validate_arg (arg0, REAL_TYPE)
7916 || !validate_arg (arg1, POINTER_TYPE)
7917 || !validate_arg (arg2, POINTER_TYPE))
7918 return NULL_TREE;
7920 type = TREE_TYPE (arg0);
7922 /* Calculate the result when the argument is a constant. */
7923 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7924 return res;
7926 /* Canonicalize sincos to cexpi. */
7927 if (!TARGET_C99_FUNCTIONS)
7928 return NULL_TREE;
7929 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7930 if (!fn)
7931 return NULL_TREE;
7933 call = build_call_expr (fn, 1, arg0);
7934 call = builtin_save_expr (call);
7936 return build2 (COMPOUND_EXPR, type,
7937 build2 (MODIFY_EXPR, void_type_node,
7938 build_fold_indirect_ref (arg1),
7939 build1 (IMAGPART_EXPR, type, call)),
7940 build2 (MODIFY_EXPR, void_type_node,
7941 build_fold_indirect_ref (arg2),
7942 build1 (REALPART_EXPR, type, call)));
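/* Sketch of the rewrite above (illustrative only; assumes a C99 target
   so that cexpi is usable):

     sincos (x, &s, &c);

   becomes, in effect,

     tmp = cexpi (x);         where cexpi (x) == cos (x) + I*sin (x)
     s = __imag__ tmp;
     c = __real__ tmp;

   expressed as a COMPOUND_EXPR of two MODIFY_EXPRs.  */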
7945 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7946 NULL_TREE if no simplification can be made. */
7948 static tree
7949 fold_builtin_cexp (tree arg0, tree type)
7951 tree rtype;
7952 tree realp, imagp, ifn;
7954 if (!validate_arg (arg0, COMPLEX_TYPE))
7955 return NULL_TREE;
7957 rtype = TREE_TYPE (TREE_TYPE (arg0));
7959 /* In case we can figure out the real part of arg0 and it is constant zero,
7960 fold to cexpi. */
7961 if (!TARGET_C99_FUNCTIONS)
7962 return NULL_TREE;
7963 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7964 if (!ifn)
7965 return NULL_TREE;
7967 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7968 && real_zerop (realp))
7970 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7971 return build_call_expr (ifn, 1, narg);
7974 /* In case we can easily decompose real and imaginary parts, split cexp
7975 to exp (r) * cexpi (i). */
7976 if (flag_unsafe_math_optimizations
7977 && realp)
7979 tree rfn, rcall, icall;
7981 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7982 if (!rfn)
7983 return NULL_TREE;
7985 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7986 if (!imagp)
7987 return NULL_TREE;
7989 icall = build_call_expr (ifn, 1, imagp);
7990 icall = builtin_save_expr (icall);
7991 rcall = build_call_expr (rfn, 1, realp);
7992 rcall = builtin_save_expr (rcall);
7993 return fold_build2 (COMPLEX_EXPR, type,
7994 fold_build2 (MULT_EXPR, rtype,
7995 rcall,
7996 fold_build1 (REALPART_EXPR, rtype, icall)),
7997 fold_build2 (MULT_EXPR, rtype,
7998 rcall,
7999 fold_build1 (IMAGPART_EXPR, rtype, icall)));
8002 return NULL_TREE;
8005 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8006 Return NULL_TREE if no simplification can be made. */
8008 static tree
8009 fold_builtin_trunc (tree fndecl, tree arg)
8011 if (!validate_arg (arg, REAL_TYPE))
8012 return NULL_TREE;
8014 /* Optimize trunc of constant value. */
8015 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8017 REAL_VALUE_TYPE r, x;
8018 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8020 x = TREE_REAL_CST (arg);
8021 real_trunc (&r, TYPE_MODE (type), &x);
8022 return build_real (type, r);
8025 return fold_trunc_transparent_mathfn (fndecl, arg);
8028 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8029 Return NULL_TREE if no simplification can be made. */
8031 static tree
8032 fold_builtin_floor (tree fndecl, tree arg)
8034 if (!validate_arg (arg, REAL_TYPE))
8035 return NULL_TREE;
8037 /* Optimize floor of constant value. */
8038 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8040 REAL_VALUE_TYPE x;
8042 x = TREE_REAL_CST (arg);
8043 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8045 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8046 REAL_VALUE_TYPE r;
8048 real_floor (&r, TYPE_MODE (type), &x);
8049 return build_real (type, r);
8053 /* Fold floor (x) where x is nonnegative to trunc (x). */
8054 if (tree_expr_nonnegative_p (arg))
8056 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8057 if (truncfn)
8058 return build_call_expr (truncfn, 1, arg);
8061 return fold_trunc_transparent_mathfn (fndecl, arg);
8064 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8065 Return NULL_TREE if no simplification can be made. */
8067 static tree
8068 fold_builtin_ceil (tree fndecl, tree arg)
8070 if (!validate_arg (arg, REAL_TYPE))
8071 return NULL_TREE;
8073 /* Optimize ceil of constant value. */
8074 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8076 REAL_VALUE_TYPE x;
8078 x = TREE_REAL_CST (arg);
8079 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8081 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8082 REAL_VALUE_TYPE r;
8084 real_ceil (&r, TYPE_MODE (type), &x);
8085 return build_real (type, r);
8089 return fold_trunc_transparent_mathfn (fndecl, arg);
8092 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8093 Return NULL_TREE if no simplification can be made. */
8095 static tree
8096 fold_builtin_round (tree fndecl, tree arg)
8098 if (!validate_arg (arg, REAL_TYPE))
8099 return NULL_TREE;
8101 /* Optimize round of constant value. */
8102 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8104 REAL_VALUE_TYPE x;
8106 x = TREE_REAL_CST (arg);
8107 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8109 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8110 REAL_VALUE_TYPE r;
8112 real_round (&r, TYPE_MODE (type), &x);
8113 return build_real (type, r);
8117 return fold_trunc_transparent_mathfn (fndecl, arg);
8120 /* Fold function call to builtin lround, lroundf or lroundl (or the
8121 corresponding long long versions) and other rounding functions. ARG
8122 is the argument to the call. Return NULL_TREE if no simplification
8123 can be made. */
8125 static tree
8126 fold_builtin_int_roundingfn (tree fndecl, tree arg)
8128 if (!validate_arg (arg, REAL_TYPE))
8129 return NULL_TREE;
8131 /* Optimize lround of constant value. */
8132 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8134 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8136 if (real_isfinite (&x))
8138 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8139 tree ftype = TREE_TYPE (arg);
8140 unsigned HOST_WIDE_INT lo2;
8141 HOST_WIDE_INT hi, lo;
8142 REAL_VALUE_TYPE r;
8144 switch (DECL_FUNCTION_CODE (fndecl))
8146 CASE_FLT_FN (BUILT_IN_LFLOOR):
8147 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8148 real_floor (&r, TYPE_MODE (ftype), &x);
8149 break;
8151 CASE_FLT_FN (BUILT_IN_LCEIL):
8152 CASE_FLT_FN (BUILT_IN_LLCEIL):
8153 real_ceil (&r, TYPE_MODE (ftype), &x);
8154 break;
8156 CASE_FLT_FN (BUILT_IN_LROUND):
8157 CASE_FLT_FN (BUILT_IN_LLROUND):
8158 real_round (&r, TYPE_MODE (ftype), &x);
8159 break;
8161 default:
8162 gcc_unreachable ();
8165 REAL_VALUE_TO_INT (&lo, &hi, r);
8166 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8167 return build_int_cst_wide (itype, lo2, hi);
8171 switch (DECL_FUNCTION_CODE (fndecl))
8173 CASE_FLT_FN (BUILT_IN_LFLOOR):
8174 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8175 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8176 if (tree_expr_nonnegative_p (arg))
8177 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8178 arg);
8179 break;
8180 default:;
8183 return fold_fixed_mathfn (fndecl, arg);
8186 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8187 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8188 the argument to the call. Return NULL_TREE if no simplification can
8189 be made. */
8191 static tree
8192 fold_builtin_bitop (tree fndecl, tree arg)
8194 if (!validate_arg (arg, INTEGER_TYPE))
8195 return NULL_TREE;
8197 /* Optimize for constant argument. */
8198 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8200 HOST_WIDE_INT hi, width, result;
8201 unsigned HOST_WIDE_INT lo;
8202 tree type;
8204 type = TREE_TYPE (arg);
8205 width = TYPE_PRECISION (type);
8206 lo = TREE_INT_CST_LOW (arg);
8208 /* Clear all the bits that are beyond the type's precision. */
8209 if (width > HOST_BITS_PER_WIDE_INT)
8211 hi = TREE_INT_CST_HIGH (arg);
8212 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8213 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8215 else
8217 hi = 0;
8218 if (width < HOST_BITS_PER_WIDE_INT)
8219 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8222 switch (DECL_FUNCTION_CODE (fndecl))
8224 CASE_INT_FN (BUILT_IN_FFS):
8225 if (lo != 0)
8226 result = exact_log2 (lo & -lo) + 1;
8227 else if (hi != 0)
8228 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8229 else
8230 result = 0;
8231 break;
8233 CASE_INT_FN (BUILT_IN_CLZ):
8234 if (hi != 0)
8235 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8236 else if (lo != 0)
8237 result = width - floor_log2 (lo) - 1;
8238 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8239 result = width;
8240 break;
8242 CASE_INT_FN (BUILT_IN_CTZ):
8243 if (lo != 0)
8244 result = exact_log2 (lo & -lo);
8245 else if (hi != 0)
8246 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8247 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8248 result = width;
8249 break;
8251 CASE_INT_FN (BUILT_IN_POPCOUNT):
8252 result = 0;
8253 while (lo)
8254 result++, lo &= lo - 1;
8255 while (hi)
8256 result++, hi &= hi - 1;
8257 break;
8259 CASE_INT_FN (BUILT_IN_PARITY):
8260 result = 0;
8261 while (lo)
8262 result++, lo &= lo - 1;
8263 while (hi)
8264 result++, hi &= hi - 1;
8265 result &= 1;
8266 break;
8268 default:
8269 gcc_unreachable ();
8272 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8275 return NULL_TREE;
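/* Worked examples (illustrative, assuming a 32-bit int):

     __builtin_popcount (0xF0F0)  folds to 8
     __builtin_ctz (0x80)         folds to 7
     __builtin_ffs (0)            folds to 0

   Each result comes from the constant folding above, so no call or
   machine instruction is emitted for a constant argument.  */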
8278 /* Fold function call to builtin_bswap and the long and long long
8279 variants. Return NULL_TREE if no simplification can be made. */
8280 static tree
8281 fold_builtin_bswap (tree fndecl, tree arg)
8283 if (! validate_arg (arg, INTEGER_TYPE))
8284 return NULL_TREE;
8286 /* Optimize constant value. */
8287 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8289 HOST_WIDE_INT hi, width, r_hi = 0;
8290 unsigned HOST_WIDE_INT lo, r_lo = 0;
8291 tree type;
8293 type = TREE_TYPE (arg);
8294 width = TYPE_PRECISION (type);
8295 lo = TREE_INT_CST_LOW (arg);
8296 hi = TREE_INT_CST_HIGH (arg);
8298 switch (DECL_FUNCTION_CODE (fndecl))
8300 case BUILT_IN_BSWAP32:
8301 case BUILT_IN_BSWAP64:
8303 int s;
8305 for (s = 0; s < width; s += 8)
8307 int d = width - s - 8;
8308 unsigned HOST_WIDE_INT byte;
8310 if (s < HOST_BITS_PER_WIDE_INT)
8311 byte = (lo >> s) & 0xff;
8312 else
8313 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8315 if (d < HOST_BITS_PER_WIDE_INT)
8316 r_lo |= byte << d;
8317 else
8318 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8322 break;
8324 default:
8325 gcc_unreachable ();
8328 if (width < HOST_BITS_PER_WIDE_INT)
8329 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8330 else
8331 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8334 return NULL_TREE;
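/* Illustrative example (assuming a 32-bit unsigned int): the byte loop
   above folds

     __builtin_bswap32 (0x11223344)

   to the constant 0x44332211 at compile time.  */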
8337 /* Return true if EXPR is the real constant contained in VALUE. */
8339 static bool
8340 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8342 STRIP_NOPS (expr);
8344 return ((TREE_CODE (expr) == REAL_CST
8345 && !TREE_OVERFLOW (expr)
8346 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8347 || (TREE_CODE (expr) == COMPLEX_CST
8348 && real_dconstp (TREE_REALPART (expr), value)
8349 && real_zerop (TREE_IMAGPART (expr))));
8352 /* A subroutine of fold_builtin to fold the various logarithmic
8353 functions. Return NULL_TREE if no simplification can be made.
8354 FUNC is the corresponding MPFR logarithm function. */
8356 static tree
8357 fold_builtin_logarithm (tree fndecl, tree arg,
8358 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8360 if (validate_arg (arg, REAL_TYPE))
8362 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8363 tree res;
8364 const enum built_in_function fcode = builtin_mathfn_code (arg);
8366 /* Optimize log(e) = 1.0. We're never passed an exact 'e';
8367 instead we'll look for 'e' truncated to MODE. So only do
8368 this if flag_unsafe_math_optimizations is set. */
8369 if (flag_unsafe_math_optimizations && func == mpfr_log)
8371 const REAL_VALUE_TYPE e_truncated =
8372 real_value_truncate (TYPE_MODE (type), dconst_e ());
8373 if (real_dconstp (arg, &e_truncated))
8374 return build_real (type, dconst1);
8377 /* Calculate the result when the argument is a constant. */
8378 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8379 return res;
8381 /* Special case: optimize logN(expN(x)) = x. */
8382 if (flag_unsafe_math_optimizations
8383 && ((func == mpfr_log
8384 && (fcode == BUILT_IN_EXP
8385 || fcode == BUILT_IN_EXPF
8386 || fcode == BUILT_IN_EXPL))
8387 || (func == mpfr_log2
8388 && (fcode == BUILT_IN_EXP2
8389 || fcode == BUILT_IN_EXP2F
8390 || fcode == BUILT_IN_EXP2L))
8391 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8392 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8394 /* Optimize logN(func()) for various exponential functions. We
8395 want to determine the value "x" and the power "exponent" in
8396 order to transform logN(x**exponent) into exponent*logN(x). */
8397 if (flag_unsafe_math_optimizations)
8399 tree exponent = 0, x = 0;
8401 switch (fcode)
8403 CASE_FLT_FN (BUILT_IN_EXP):
8404 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8405 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8406 dconst_e ()));
8407 exponent = CALL_EXPR_ARG (arg, 0);
8408 break;
8409 CASE_FLT_FN (BUILT_IN_EXP2):
8410 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8411 x = build_real (type, dconst2);
8412 exponent = CALL_EXPR_ARG (arg, 0);
8413 break;
8414 CASE_FLT_FN (BUILT_IN_EXP10):
8415 CASE_FLT_FN (BUILT_IN_POW10):
8416 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8418 REAL_VALUE_TYPE dconst10;
8419 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8420 x = build_real (type, dconst10);
8422 exponent = CALL_EXPR_ARG (arg, 0);
8423 break;
8424 CASE_FLT_FN (BUILT_IN_SQRT):
8425 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8426 x = CALL_EXPR_ARG (arg, 0);
8427 exponent = build_real (type, dconsthalf);
8428 break;
8429 CASE_FLT_FN (BUILT_IN_CBRT):
8430 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8431 x = CALL_EXPR_ARG (arg, 0);
8432 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8433 dconst_third ()));
8434 break;
8435 CASE_FLT_FN (BUILT_IN_POW):
8436 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8437 x = CALL_EXPR_ARG (arg, 0);
8438 exponent = CALL_EXPR_ARG (arg, 1);
8439 break;
8440 default:
8441 break;
8444 /* Now perform the optimization. */
8445 if (x && exponent)
8447 tree logfn = build_call_expr (fndecl, 1, x);
8448 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8453 return NULL_TREE;
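/* Illustrative examples (both require flag_unsafe_math_optimizations):

     log (pow (x, 3.0))  ->  3.0 * log (x)
     log2 (sqrt (x))     ->  0.5 * log2 (x)

   produced by the x**exponent recognition above.  */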
8456 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8457 NULL_TREE if no simplification can be made. */
8459 static tree
8460 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8462 tree res, narg0, narg1;
8464 if (!validate_arg (arg0, REAL_TYPE)
8465 || !validate_arg (arg1, REAL_TYPE))
8466 return NULL_TREE;
8468 /* Calculate the result when the argument is a constant. */
8469 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8470 return res;
8472 /* If either argument to hypot has a negate or abs, strip that off.
8473 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8474 narg0 = fold_strip_sign_ops (arg0);
8475 narg1 = fold_strip_sign_ops (arg1);
8476 if (narg0 || narg1)
8478 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8479 narg1 ? narg1 : arg1);
8482 /* If either argument is zero, hypot is fabs of the other. */
8483 if (real_zerop (arg0))
8484 return fold_build1 (ABS_EXPR, type, arg1);
8485 else if (real_zerop (arg1))
8486 return fold_build1 (ABS_EXPR, type, arg0);
8488 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8489 if (flag_unsafe_math_optimizations
8490 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8492 const REAL_VALUE_TYPE sqrt2_trunc
8493 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8494 return fold_build2 (MULT_EXPR, type,
8495 fold_build1 (ABS_EXPR, type, arg0),
8496 build_real (type, sqrt2_trunc));
8499 return NULL_TREE;
8503 /* Fold a builtin function call to pow, powf, or powl. Return
8504 NULL_TREE if no simplification can be made. */
8505 static tree
8506 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8508 tree res;
8510 if (!validate_arg (arg0, REAL_TYPE)
8511 || !validate_arg (arg1, REAL_TYPE))
8512 return NULL_TREE;
8514 /* Calculate the result when the argument is a constant. */
8515 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8516 return res;
8518 /* Optimize pow(1.0,y) = 1.0. */
8519 if (real_onep (arg0))
8520 return omit_one_operand (type, build_real (type, dconst1), arg1);
8522 if (TREE_CODE (arg1) == REAL_CST
8523 && !TREE_OVERFLOW (arg1))
8525 REAL_VALUE_TYPE cint;
8526 REAL_VALUE_TYPE c;
8527 HOST_WIDE_INT n;
8529 c = TREE_REAL_CST (arg1);
8531 /* Optimize pow(x,0.0) = 1.0. */
8532 if (REAL_VALUES_EQUAL (c, dconst0))
8533 return omit_one_operand (type, build_real (type, dconst1),
8534 arg0);
8536 /* Optimize pow(x,1.0) = x. */
8537 if (REAL_VALUES_EQUAL (c, dconst1))
8538 return arg0;
8540 /* Optimize pow(x,-1.0) = 1.0/x. */
8541 if (REAL_VALUES_EQUAL (c, dconstm1))
8542 return fold_build2 (RDIV_EXPR, type,
8543 build_real (type, dconst1), arg0);
8545 /* Optimize pow(x,0.5) = sqrt(x). */
8546 if (flag_unsafe_math_optimizations
8547 && REAL_VALUES_EQUAL (c, dconsthalf))
8549 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8551 if (sqrtfn != NULL_TREE)
8552 return build_call_expr (sqrtfn, 1, arg0);
8555 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8556 if (flag_unsafe_math_optimizations)
8558 const REAL_VALUE_TYPE dconstroot
8559 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8561 if (REAL_VALUES_EQUAL (c, dconstroot))
8563 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8564 if (cbrtfn != NULL_TREE)
8565 return build_call_expr (cbrtfn, 1, arg0);
8569 /* Check for an integer exponent. */
8570 n = real_to_integer (&c);
8571 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8572 if (real_identical (&c, &cint))
8574 /* Attempt to evaluate pow at compile-time, unless this should
8575 raise an exception. */
8576 if (TREE_CODE (arg0) == REAL_CST
8577 && !TREE_OVERFLOW (arg0)
8578 && (n > 0
8579 || (!flag_trapping_math && !flag_errno_math)
8580 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8582 REAL_VALUE_TYPE x;
8583 bool inexact;
8585 x = TREE_REAL_CST (arg0);
8586 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8587 if (flag_unsafe_math_optimizations || !inexact)
8588 return build_real (type, x);
8591 /* Strip sign ops from even integer powers. */
8592 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8594 tree narg0 = fold_strip_sign_ops (arg0);
8595 if (narg0)
8596 return build_call_expr (fndecl, 2, narg0, arg1);
8601 if (flag_unsafe_math_optimizations)
8603 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8605 /* Optimize pow(expN(x),y) = expN(x*y). */
8606 if (BUILTIN_EXPONENT_P (fcode))
8608 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8609 tree arg = CALL_EXPR_ARG (arg0, 0);
8610 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8611 return build_call_expr (expfn, 1, arg);
8614 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8615 if (BUILTIN_SQRT_P (fcode))
8617 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8618 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8619 build_real (type, dconsthalf));
8620 return build_call_expr (fndecl, 2, narg0, narg1);
8623 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8624 if (BUILTIN_CBRT_P (fcode))
8626 tree arg = CALL_EXPR_ARG (arg0, 0);
8627 if (tree_expr_nonnegative_p (arg))
8629 const REAL_VALUE_TYPE dconstroot
8630 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8631 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8632 build_real (type, dconstroot));
8633 return build_call_expr (fndecl, 2, arg, narg1);
8637 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8638 if (fcode == BUILT_IN_POW
8639 || fcode == BUILT_IN_POWF
8640 || fcode == BUILT_IN_POWL)
8642 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8643 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8644 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8645 return build_call_expr (fndecl, 2, arg00, narg1);
8649 return NULL_TREE;
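/* Worked examples of the cases above (illustrative only; the entries
   marked "unsafe" need flag_unsafe_math_optimizations):

     pow (x, 1.0)     ->  x
     pow (x, -1.0)    ->  1.0 / x
     pow (x, 0.5)     ->  sqrt (x)       (unsafe)
     pow (2.0, 10.0)  ->  1024.0         (integer exponent, evaluated
                                          with real_powi)  */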
8652 /* Fold a builtin function call to powi, powif, or powil with arguments
8653 ARG0 and ARG1. Return NULL_TREE if no simplification can be made. */
8654 static tree
8655 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8656 tree arg0, tree arg1, tree type)
8658 if (!validate_arg (arg0, REAL_TYPE)
8659 || !validate_arg (arg1, INTEGER_TYPE))
8660 return NULL_TREE;
8662 /* Optimize pow(1.0,y) = 1.0. */
8663 if (real_onep (arg0))
8664 return omit_one_operand (type, build_real (type, dconst1), arg1);
8666 if (host_integerp (arg1, 0))
8668 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8670 /* Evaluate powi at compile-time. */
8671 if (TREE_CODE (arg0) == REAL_CST
8672 && !TREE_OVERFLOW (arg0))
8674 REAL_VALUE_TYPE x;
8675 x = TREE_REAL_CST (arg0);
8676 real_powi (&x, TYPE_MODE (type), &x, c);
8677 return build_real (type, x);
8680 /* Optimize pow(x,0) = 1.0. */
8681 if (c == 0)
8682 return omit_one_operand (type, build_real (type, dconst1),
8683 arg0);
8685 /* Optimize pow(x,1) = x. */
8686 if (c == 1)
8687 return arg0;
8689 /* Optimize pow(x,-1) = 1.0/x. */
8690 if (c == -1)
8691 return fold_build2 (RDIV_EXPR, type,
8692 build_real (type, dconst1), arg0);
8695 return NULL_TREE;
8698 /* A subroutine of fold_builtin to fold the various exponent
8699 functions. Return NULL_TREE if no simplification can be made.
8700 FUNC is the corresponding MPFR exponent function. */
8702 static tree
8703 fold_builtin_exponent (tree fndecl, tree arg,
8704 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8706 if (validate_arg (arg, REAL_TYPE))
8708 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8709 tree res;
8711 /* Calculate the result when the argument is a constant. */
8712 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8713 return res;
8715 /* Optimize expN(logN(x)) = x. */
8716 if (flag_unsafe_math_optimizations)
8718 const enum built_in_function fcode = builtin_mathfn_code (arg);
8720 if ((func == mpfr_exp
8721 && (fcode == BUILT_IN_LOG
8722 || fcode == BUILT_IN_LOGF
8723 || fcode == BUILT_IN_LOGL))
8724 || (func == mpfr_exp2
8725 && (fcode == BUILT_IN_LOG2
8726 || fcode == BUILT_IN_LOG2F
8727 || fcode == BUILT_IN_LOG2L))
8728 || (func == mpfr_exp10
8729 && (fcode == BUILT_IN_LOG10
8730 || fcode == BUILT_IN_LOG10F
8731 || fcode == BUILT_IN_LOG10L)))
8732 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8736 return NULL_TREE;
8739 /* Return true if VAR is a VAR_DECL or a component thereof. */
8741 static bool
8742 var_decl_component_p (tree var)
8744 tree inner = var;
8745 while (handled_component_p (inner))
8746 inner = TREE_OPERAND (inner, 0);
8747 return SSA_VAR_P (inner);
8750 /* Fold function call to builtin memset. Return
8751 NULL_TREE if no simplification can be made. */
8753 static tree
8754 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8756 tree var, ret;
8757 unsigned HOST_WIDE_INT length, cval;
8759 if (! validate_arg (dest, POINTER_TYPE)
8760 || ! validate_arg (c, INTEGER_TYPE)
8761 || ! validate_arg (len, INTEGER_TYPE))
8762 return NULL_TREE;
8764 if (! host_integerp (len, 1))
8765 return NULL_TREE;
8767 /* If the LEN parameter is zero, return DEST. */
8768 if (integer_zerop (len))
8769 return omit_one_operand (type, dest, c);
8771 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8772 return NULL_TREE;
8774 var = dest;
8775 STRIP_NOPS (var);
8776 if (TREE_CODE (var) != ADDR_EXPR)
8777 return NULL_TREE;
8779 var = TREE_OPERAND (var, 0);
8780 if (TREE_THIS_VOLATILE (var))
8781 return NULL_TREE;
8783 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8784 && !POINTER_TYPE_P (TREE_TYPE (var)))
8785 return NULL_TREE;
8787 if (! var_decl_component_p (var))
8788 return NULL_TREE;
8790 length = tree_low_cst (len, 1);
8791 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8792 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8793 < (int) length)
8794 return NULL_TREE;
8796 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8797 return NULL_TREE;
8799 if (integer_zerop (c))
8800 cval = 0;
8801 else
8803 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8804 return NULL_TREE;
8806 cval = tree_low_cst (c, 1);
8807 cval &= 0xff;
8808 cval |= cval << 8;
8809 cval |= cval << 16;
8810 cval |= (cval << 31) << 1;
8813 ret = build_int_cst_type (TREE_TYPE (var), cval);
8814 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8815 if (ignore)
8816 return ret;
8818 return omit_one_operand (type, dest, ret);
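/* Sketch of the effect (illustrative; assumes 8-bit bytes and a 32-bit
   int destination):

     int i;
     memset (&i, 0xAB, sizeof (int));

   is folded by the code above into the single store

     i = 0xABABABAB;

   with the replicated byte built by the CVAL shift/or sequence.  */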
8821 /* Fold function call to builtin bzero. Return
8822 NULL_TREE if no simplification can be made. */
8824 static tree
8825 fold_builtin_bzero (tree dest, tree size, bool ignore)
8827 if (! validate_arg (dest, POINTER_TYPE)
8828 || ! validate_arg (size, INTEGER_TYPE))
8829 return NULL_TREE;
8831 if (!ignore)
8832 return NULL_TREE;
8834 /* New argument list transforming bzero(ptr x, int y) to
8835 memset(ptr x, int 0, size_t y). This is done this way
8836 so that if it isn't expanded inline, we fall back to
8837 calling bzero instead of memset. */
8839 return fold_builtin_memset (dest, integer_zero_node,
8840 fold_convert (sizetype, size),
8841 void_type_node, ignore);
8844 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8845 NULL_TREE if no simplification can be made.
8846 If ENDP is 0, return DEST (like memcpy).
8847 If ENDP is 1, return DEST+LEN (like mempcpy).
8848 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8849 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8850 (memmove). */
8852 static tree
8853 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8855 tree destvar, srcvar, expr;
8857 if (! validate_arg (dest, POINTER_TYPE)
8858 || ! validate_arg (src, POINTER_TYPE)
8859 || ! validate_arg (len, INTEGER_TYPE))
8860 return NULL_TREE;
8862 /* If the LEN parameter is zero, return DEST. */
8863 if (integer_zerop (len))
8864 return omit_one_operand (type, dest, src);
8866 /* If SRC and DEST are the same (and not volatile), return
8867 DEST{,+LEN,+LEN-1}. */
8868 if (operand_equal_p (src, dest, 0))
8869 expr = len;
8870 else
8872 tree srctype, desttype;
8873 int src_align, dest_align;
8875 if (endp == 3)
8877 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8878 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8880 /* Both DEST and SRC must be pointer types.
8881 ??? This is what old code did. Is the testing for pointer types
8882 really mandatory?
8884 If either SRC is readonly or length is 1, we can use memcpy. */
8885 if (dest_align && src_align
8886 && (readonly_data_expr (src)
8887 || (host_integerp (len, 1)
8888 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8889 tree_low_cst (len, 1)))))
8891 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8892 if (!fn)
8893 return NULL_TREE;
8894 return build_call_expr (fn, 3, dest, src, len);
8896 return NULL_TREE;
8899 if (!host_integerp (len, 0))
8900 return NULL_TREE;
8901 /* FIXME:
8902 This logic loses for arguments like (type *)malloc (sizeof (type)),
8903 since we strip the casts up to the VOID return value from malloc.
8904 Perhaps we ought to inherit type from non-VOID argument here? */
8905 STRIP_NOPS (src);
8906 STRIP_NOPS (dest);
8907 srctype = TREE_TYPE (TREE_TYPE (src));
8908 desttype = TREE_TYPE (TREE_TYPE (dest));
8909 if (!srctype || !desttype
8910 || !TYPE_SIZE_UNIT (srctype)
8911 || !TYPE_SIZE_UNIT (desttype)
8912 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8913 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8914 || TYPE_VOLATILE (srctype)
8915 || TYPE_VOLATILE (desttype))
8916 return NULL_TREE;
8918 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8919 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8920 if (dest_align < (int) TYPE_ALIGN (desttype)
8921 || src_align < (int) TYPE_ALIGN (srctype))
8922 return NULL_TREE;
8924 if (!ignore)
8925 dest = builtin_save_expr (dest);
8927 srcvar = NULL_TREE;
8928 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8930 srcvar = build_fold_indirect_ref (src);
8931 if (TREE_THIS_VOLATILE (srcvar))
8932 return NULL_TREE;
8933 else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8934 srcvar = NULL_TREE;
8935 /* With memcpy, it is possible to bypass aliasing rules, so without
8936 this check, e.g. execute/20060930-2.c would be misoptimized,
8937 because it uses a conflicting alias set to hold the argument for the
8938 memcpy call. This check is probably unnecessary with
8939 -fno-strict-aliasing. Similarly for destvar. See also
8940 PR29286. */
8941 else if (!var_decl_component_p (srcvar))
8942 srcvar = NULL_TREE;
8945 destvar = NULL_TREE;
8946 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8948 destvar = build_fold_indirect_ref (dest);
8949 if (TREE_THIS_VOLATILE (destvar))
8950 return NULL_TREE;
8951 else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8952 destvar = NULL_TREE;
8953 else if (!var_decl_component_p (destvar))
8954 destvar = NULL_TREE;
8957 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8958 return NULL_TREE;
8960 if (srcvar == NULL_TREE)
8962 tree srcptype;
8963 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8964 return NULL_TREE;
8966 srctype = build_qualified_type (desttype, 0);
8967 if (src_align < (int) TYPE_ALIGN (srctype))
8969 if (AGGREGATE_TYPE_P (srctype)
8970 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8971 return NULL_TREE;
8973 srctype = build_variant_type_copy (srctype);
8974 TYPE_ALIGN (srctype) = src_align;
8975 TYPE_USER_ALIGN (srctype) = 1;
8976 TYPE_PACKED (srctype) = 1;
8978 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8979 src = fold_convert (srcptype, src);
8980 srcvar = build_fold_indirect_ref (src);
8982 else if (destvar == NULL_TREE)
8984 tree destptype;
8985 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8986 return NULL_TREE;
8988 desttype = build_qualified_type (srctype, 0);
8989 if (dest_align < (int) TYPE_ALIGN (desttype))
8991 if (AGGREGATE_TYPE_P (desttype)
8992 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8993 return NULL_TREE;
8995 desttype = build_variant_type_copy (desttype);
8996 TYPE_ALIGN (desttype) = dest_align;
8997 TYPE_USER_ALIGN (desttype) = 1;
8998 TYPE_PACKED (desttype) = 1;
9000 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
9001 dest = fold_convert (destptype, dest);
9002 destvar = build_fold_indirect_ref (dest);
9005 if (srctype == desttype
9006 || (gimple_in_ssa_p (cfun)
9007 && useless_type_conversion_p (desttype, srctype)))
9008 expr = srcvar;
9009 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
9010 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
9011 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
9012 || POINTER_TYPE_P (TREE_TYPE (destvar))))
9013 expr = fold_convert (TREE_TYPE (destvar), srcvar);
9014 else
9015 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
9016 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
9019 if (ignore)
9020 return expr;
9022 if (endp == 0 || endp == 3)
9023 return omit_one_operand (type, dest, expr);
9025 if (expr == len)
9026 expr = NULL_TREE;
9028 if (endp == 2)
9029 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
9030 ssize_int (1));
9032 len = fold_convert (sizetype, len);
9033 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
9034 dest = fold_convert (type, dest);
9035 if (expr)
9036 dest = omit_one_operand (type, dest, expr);
9037 return dest;
9040 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9041 If LEN is not NULL, it represents the length of the string to be
9042 copied. Return NULL_TREE if no simplification can be made. */
9044 tree
9045 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
9047 tree fn;
9049 if (!validate_arg (dest, POINTER_TYPE)
9050 || !validate_arg (src, POINTER_TYPE))
9051 return NULL_TREE;
9053 /* If SRC and DEST are the same (and not volatile), return DEST. */
9054 if (operand_equal_p (src, dest, 0))
9055 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
9057 if (optimize_function_for_size_p (cfun))
9058 return NULL_TREE;
9060 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9061 if (!fn)
9062 return NULL_TREE;
9064 if (!len)
9066 len = c_strlen (src, 1);
9067 if (! len || TREE_SIDE_EFFECTS (len))
9068 return NULL_TREE;
9071 len = size_binop (PLUS_EXPR, len, ssize_int (1));
9072 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9073 build_call_expr (fn, 3, dest, src, len));
9076 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9077 If SLEN is not NULL, it represents the length of the source string.
9078 Return NULL_TREE if no simplification can be made. */
9080 tree
9081 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
9083 tree fn;
9085 if (!validate_arg (dest, POINTER_TYPE)
9086 || !validate_arg (src, POINTER_TYPE)
9087 || !validate_arg (len, INTEGER_TYPE))
9088 return NULL_TREE;
9090 /* If the LEN parameter is zero, return DEST. */
9091 if (integer_zerop (len))
9092 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9094 /* We can't compare slen with len as constants below if len is not a
9095 constant. */
9096 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9097 return NULL_TREE;
9099 if (!slen)
9100 slen = c_strlen (src, 1);
9102 /* Now, we must be passed a constant src ptr parameter. */
9103 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9104 return NULL_TREE;
9106 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
9108 /* We do not support simplification of this case, though we do
9109 support it when expanding trees into RTL. */
9110 /* FIXME: generate a call to __builtin_memset. */
9111 if (tree_int_cst_lt (slen, len))
9112 return NULL_TREE;
9114 /* OK, transform into builtin memcpy. */
9115 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9116 if (!fn)
9117 return NULL_TREE;
9118 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9119 build_call_expr (fn, 3, dest, src, len));
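/* Illustrative example (assumes the source is a string literal so its
   length is known):

     strncpy (buf, "hello", 4)    becomes   memcpy (buf, "hello", 4)

   because slen + 1 == 6 is not smaller than len == 4, whereas

     strncpy (buf, "hi", 8)

   is left alone here, since it would additionally need a memset of the
   trailing bytes.  */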
9122 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9123 arguments to the call, and TYPE is its return type.
9124 Return NULL_TREE if no simplification can be made. */
9126 static tree
9127 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
9129 if (!validate_arg (arg1, POINTER_TYPE)
9130 || !validate_arg (arg2, INTEGER_TYPE)
9131 || !validate_arg (len, INTEGER_TYPE))
9132 return NULL_TREE;
9133 else
9135 const char *p1;
9137 if (TREE_CODE (arg2) != INTEGER_CST
9138 || !host_integerp (len, 1))
9139 return NULL_TREE;
9141 p1 = c_getstr (arg1);
9142 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9144 char c;
9145 const char *r;
9146 tree tem;
9148 if (target_char_cast (arg2, &c))
9149 return NULL_TREE;
9151 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9153 if (r == NULL)
9154 return build_int_cst (TREE_TYPE (arg1), 0);
9156 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9157 size_int (r - p1));
9158 return fold_convert (type, tem);
9160 return NULL_TREE;
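/* Illustrative sketch of the memchr fold above, assuming a constant string
   argument and a constant LEN no larger than the string length plus one:

     memchr ("hello", 'l', 5)   folds to   (void *) ("hello" + 2)
     memchr ("hi", 'z', 3)      folds to   a null pointer constant  */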
9164 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
9165 Return NULL_TREE if no simplification can be made. */
9167 static tree
9168 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
9170 const char *p1, *p2;
9172 if (!validate_arg (arg1, POINTER_TYPE)
9173 || !validate_arg (arg2, POINTER_TYPE)
9174 || !validate_arg (len, INTEGER_TYPE))
9175 return NULL_TREE;
9177 /* If the LEN parameter is zero, return zero. */
9178 if (integer_zerop (len))
9179 return omit_two_operands (integer_type_node, integer_zero_node,
9180 arg1, arg2);
9182 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9183 if (operand_equal_p (arg1, arg2, 0))
9184 return omit_one_operand (integer_type_node, integer_zero_node, len);
9186 p1 = c_getstr (arg1);
9187 p2 = c_getstr (arg2);
9189 /* If all arguments are constant, and the value of len is not greater
9190 than the lengths of arg1 and arg2, evaluate at compile-time. */
9191 if (host_integerp (len, 1) && p1 && p2
9192 && compare_tree_int (len, strlen (p1) + 1) <= 0
9193 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9195 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9197 if (r > 0)
9198 return integer_one_node;
9199 else if (r < 0)
9200 return integer_minus_one_node;
9201 else
9202 return integer_zero_node;
9205 /* If len parameter is one, return an expression corresponding to
9206 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9207 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9209 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9210 tree cst_uchar_ptr_node
9211 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9213 tree ind1 = fold_convert (integer_type_node,
9214 build1 (INDIRECT_REF, cst_uchar_node,
9215 fold_convert (cst_uchar_ptr_node,
9216 arg1)));
9217 tree ind2 = fold_convert (integer_type_node,
9218 build1 (INDIRECT_REF, cst_uchar_node,
9219 fold_convert (cst_uchar_ptr_node,
9220 arg2)));
9221 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9224 return NULL_TREE;
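/* Illustrative sketch of the memcmp folds above (hypothetical P, Q and N):

     memcmp (p, p, n)        folds to   0    (N is kept for side effects)
     memcmp ("ab", "ac", 2)  folds to   -1   (evaluated at compile time)
     memcmp (p, q, 1)        folds to   *(const unsigned char *) p
                                         - *(const unsigned char *) q

   Note that the compile-time result is canonicalized to -1, 0 or 1.  */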
9227 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9228 Return NULL_TREE if no simplification can be made. */
9230 static tree
9231 fold_builtin_strcmp (tree arg1, tree arg2)
9233 const char *p1, *p2;
9235 if (!validate_arg (arg1, POINTER_TYPE)
9236 || !validate_arg (arg2, POINTER_TYPE))
9237 return NULL_TREE;
9239 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9240 if (operand_equal_p (arg1, arg2, 0))
9241 return integer_zero_node;
9243 p1 = c_getstr (arg1);
9244 p2 = c_getstr (arg2);
9246 if (p1 && p2)
9248 const int i = strcmp (p1, p2);
9249 if (i < 0)
9250 return integer_minus_one_node;
9251 else if (i > 0)
9252 return integer_one_node;
9253 else
9254 return integer_zero_node;
9257 /* If the second arg is "", return *(const unsigned char*)arg1. */
9258 if (p2 && *p2 == '\0')
9260 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9261 tree cst_uchar_ptr_node
9262 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9264 return fold_convert (integer_type_node,
9265 build1 (INDIRECT_REF, cst_uchar_node,
9266 fold_convert (cst_uchar_ptr_node,
9267 arg1)));
9270 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9271 if (p1 && *p1 == '\0')
9273 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9274 tree cst_uchar_ptr_node
9275 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9277 tree temp = fold_convert (integer_type_node,
9278 build1 (INDIRECT_REF, cst_uchar_node,
9279 fold_convert (cst_uchar_ptr_node,
9280 arg2)));
9281 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9284 return NULL_TREE;
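/* Illustrative sketch of the strcmp folds above (hypothetical S):

     strcmp (s, s)           folds to   0
     strcmp ("abc", "abd")   folds to   -1   (canonicalized to -1, 0 or 1)
     strcmp (s, "")          folds to   *(const unsigned char *) s  */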
9287 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9288 Return NULL_TREE if no simplification can be made. */
9290 static tree
9291 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9293 const char *p1, *p2;
9295 if (!validate_arg (arg1, POINTER_TYPE)
9296 || !validate_arg (arg2, POINTER_TYPE)
9297 || !validate_arg (len, INTEGER_TYPE))
9298 return NULL_TREE;
9300 /* If the LEN parameter is zero, return zero. */
9301 if (integer_zerop (len))
9302 return omit_two_operands (integer_type_node, integer_zero_node,
9303 arg1, arg2);
9305 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9306 if (operand_equal_p (arg1, arg2, 0))
9307 return omit_one_operand (integer_type_node, integer_zero_node, len);
9309 p1 = c_getstr (arg1);
9310 p2 = c_getstr (arg2);
9312 if (host_integerp (len, 1) && p1 && p2)
9314 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9315 if (i > 0)
9316 return integer_one_node;
9317 else if (i < 0)
9318 return integer_minus_one_node;
9319 else
9320 return integer_zero_node;
9323 /* If the second arg is "", and the length is greater than zero,
9324 return *(const unsigned char*)arg1. */
9325 if (p2 && *p2 == '\0'
9326 && TREE_CODE (len) == INTEGER_CST
9327 && tree_int_cst_sgn (len) == 1)
9329 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9330 tree cst_uchar_ptr_node
9331 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9333 return fold_convert (integer_type_node,
9334 build1 (INDIRECT_REF, cst_uchar_node,
9335 fold_convert (cst_uchar_ptr_node,
9336 arg1)));
9339 /* If the first arg is "", and the length is greater than zero,
9340 return -*(const unsigned char*)arg2. */
9341 if (p1 && *p1 == '\0'
9342 && TREE_CODE (len) == INTEGER_CST
9343 && tree_int_cst_sgn (len) == 1)
9345 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9346 tree cst_uchar_ptr_node
9347 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9349 tree temp = fold_convert (integer_type_node,
9350 build1 (INDIRECT_REF, cst_uchar_node,
9351 fold_convert (cst_uchar_ptr_node,
9352 arg2)));
9353 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9356 /* If len parameter is one, return an expression corresponding to
9357 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9358 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9360 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9361 tree cst_uchar_ptr_node
9362 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9364 tree ind1 = fold_convert (integer_type_node,
9365 build1 (INDIRECT_REF, cst_uchar_node,
9366 fold_convert (cst_uchar_ptr_node,
9367 arg1)));
9368 tree ind2 = fold_convert (integer_type_node,
9369 build1 (INDIRECT_REF, cst_uchar_node,
9370 fold_convert (cst_uchar_ptr_node,
9371 arg2)));
9372 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9375 return NULL_TREE;
9378 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9379 ARG. Return NULL_TREE if no simplification can be made. */
9381 static tree
9382 fold_builtin_signbit (tree arg, tree type)
9384 tree temp;
9386 if (!validate_arg (arg, REAL_TYPE))
9387 return NULL_TREE;
9389 /* If ARG is a compile-time constant, determine the result. */
9390 if (TREE_CODE (arg) == REAL_CST
9391 && !TREE_OVERFLOW (arg))
9393 REAL_VALUE_TYPE c;
9395 c = TREE_REAL_CST (arg);
9396 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9397 return fold_convert (type, temp);
9400 /* If ARG is non-negative, the result is always zero. */
9401 if (tree_expr_nonnegative_p (arg))
9402 return omit_one_operand (type, integer_zero_node, arg);
9404 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9405 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9406 return fold_build2 (LT_EXPR, type, arg,
9407 build_real (TREE_TYPE (arg), dconst0));
9409 return NULL_TREE;
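/* Illustrative sketch of the signbit folds above: a constant argument is
   evaluated directly, e.g. signbit (-2.5) folds to 1 and signbit (3.0)
   folds to 0; an argument known to be non-negative folds to 0; and when
   the format has no signed zeros the call degenerates to the comparison
   ARG < 0.0.  */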
9412 /* Fold function call to builtin copysign, copysignf or copysignl with
9413 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9414 be made. */
9416 static tree
9417 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9419 tree tem;
9421 if (!validate_arg (arg1, REAL_TYPE)
9422 || !validate_arg (arg2, REAL_TYPE))
9423 return NULL_TREE;
9425 /* copysign(X,X) is X. */
9426 if (operand_equal_p (arg1, arg2, 0))
9427 return fold_convert (type, arg1);
9429 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9430 if (TREE_CODE (arg1) == REAL_CST
9431 && TREE_CODE (arg2) == REAL_CST
9432 && !TREE_OVERFLOW (arg1)
9433 && !TREE_OVERFLOW (arg2))
9435 REAL_VALUE_TYPE c1, c2;
9437 c1 = TREE_REAL_CST (arg1);
9438 c2 = TREE_REAL_CST (arg2);
9439 /* c1.sign := c2.sign. */
9440 real_copysign (&c1, &c2);
9441 return build_real (type, c1);
9444 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9445 Remember to evaluate Y for side-effects. */
9446 if (tree_expr_nonnegative_p (arg2))
9447 return omit_one_operand (type,
9448 fold_build1 (ABS_EXPR, type, arg1),
9449 arg2);
9451 /* Strip sign changing operations for the first argument. */
9452 tem = fold_strip_sign_ops (arg1);
9453 if (tem)
9454 return build_call_expr (fndecl, 2, tem, arg2);
9456 return NULL_TREE;
9459 /* Fold a call to builtin isascii with argument ARG. */
9461 static tree
9462 fold_builtin_isascii (tree arg)
9464 if (!validate_arg (arg, INTEGER_TYPE))
9465 return NULL_TREE;
9466 else
9468 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9469 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9470 build_int_cst (NULL_TREE,
9471 ~ (unsigned HOST_WIDE_INT) 0x7f));
9472 return fold_build2 (EQ_EXPR, integer_type_node,
9473 arg, integer_zero_node);
9477 /* Fold a call to builtin toascii with argument ARG. */
9479 static tree
9480 fold_builtin_toascii (tree arg)
9482 if (!validate_arg (arg, INTEGER_TYPE))
9483 return NULL_TREE;
9485 /* Transform toascii(c) -> (c & 0x7f). */
9486 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9487 build_int_cst (NULL_TREE, 0x7f));
9490 /* Fold a call to builtin isdigit with argument ARG. */
9492 static tree
9493 fold_builtin_isdigit (tree arg)
9495 if (!validate_arg (arg, INTEGER_TYPE))
9496 return NULL_TREE;
9497 else
9499 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9500 /* According to the C standard, isdigit is unaffected by locale.
9501 However, it definitely is affected by the target character set. */
9502 unsigned HOST_WIDE_INT target_digit0
9503 = lang_hooks.to_target_charset ('0');
9505 if (target_digit0 == 0)
9506 return NULL_TREE;
9508 arg = fold_convert (unsigned_type_node, arg);
9509 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9510 build_int_cst (unsigned_type_node, target_digit0));
9511 return fold_build2 (LE_EXPR, integer_type_node, arg,
9512 build_int_cst (unsigned_type_node, 9));
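/* Illustrative sketch of the isdigit fold above, assuming an ASCII target
   where '0' is 48:

     isdigit (c)   folds to   ((unsigned) c - 48 <= 9)

   The unsigned subtraction wraps around for c < '0', producing a huge
   value, so a single comparison covers both ends of the digit range.  */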
9516 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9518 static tree
9519 fold_builtin_fabs (tree arg, tree type)
9521 if (!validate_arg (arg, REAL_TYPE))
9522 return NULL_TREE;
9524 arg = fold_convert (type, arg);
9525 if (TREE_CODE (arg) == REAL_CST)
9526 return fold_abs_const (arg, type);
9527 return fold_build1 (ABS_EXPR, type, arg);
9530 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9532 static tree
9533 fold_builtin_abs (tree arg, tree type)
9535 if (!validate_arg (arg, INTEGER_TYPE))
9536 return NULL_TREE;
9538 arg = fold_convert (type, arg);
9539 if (TREE_CODE (arg) == INTEGER_CST)
9540 return fold_abs_const (arg, type);
9541 return fold_build1 (ABS_EXPR, type, arg);
9544 /* Fold a call to builtin fmin or fmax. */
9546 static tree
9547 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9549 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9551 /* Calculate the result when the argument is a constant. */
9552 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9554 if (res)
9555 return res;
9557 /* If either argument is NaN, return the other one. Avoid the
9558 transformation if we get (and honor) a signalling NaN. Using
9559 omit_one_operand() ensures we create a non-lvalue. */
9560 if (TREE_CODE (arg0) == REAL_CST
9561 && real_isnan (&TREE_REAL_CST (arg0))
9562 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9563 || ! TREE_REAL_CST (arg0).signalling))
9564 return omit_one_operand (type, arg1, arg0);
9565 if (TREE_CODE (arg1) == REAL_CST
9566 && real_isnan (&TREE_REAL_CST (arg1))
9567 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9568 || ! TREE_REAL_CST (arg1).signalling))
9569 return omit_one_operand (type, arg0, arg1);
9571 /* Transform fmin/fmax(x,x) -> x. */
9572 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9573 return omit_one_operand (type, arg0, arg1);
9575 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9576 functions to return the numeric arg if the other one is NaN.
9577 These tree codes don't honor that, so only transform if
9578 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9579 handled, so we don't have to worry about it either. */
9580 if (flag_finite_math_only)
9581 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9582 fold_convert (type, arg0),
9583 fold_convert (type, arg1));
9585 return NULL_TREE;
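/* Illustrative sketch of the fmin/fmax folds above (hypothetical X):

     fmin (x, __builtin_nan (""))   folds to   x   (quiet NaN dropped)
     fmax (x, x)                    folds to   x

   and with -ffinite-math-only the calls become plain MIN_EXPR/MAX_EXPR
   trees, since NaN handling can then be ignored.  */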
9588 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9590 static tree
9591 fold_builtin_carg (tree arg, tree type)
9593 if (validate_arg (arg, COMPLEX_TYPE))
9595 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9597 if (atan2_fn)
9599 tree new_arg = builtin_save_expr (arg);
9600 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9601 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9602 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9606 return NULL_TREE;
9609 /* Fold a call to builtin logb/ilogb. */
9611 static tree
9612 fold_builtin_logb (tree arg, tree rettype)
9614 if (! validate_arg (arg, REAL_TYPE))
9615 return NULL_TREE;
9617 STRIP_NOPS (arg);
9619 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9621 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9623 switch (value->cl)
9625 case rvc_nan:
9626 case rvc_inf:
9627 /* If arg is Inf or NaN and we're logb, return it. */
9628 if (TREE_CODE (rettype) == REAL_TYPE)
9629 return fold_convert (rettype, arg);
9630 /* Fall through... */
9631 case rvc_zero:
9632 /* Zero may set errno and/or raise an exception for logb; also,
9633 for ilogb we don't know FP_ILOGB0. */
9634 return NULL_TREE;
9635 case rvc_normal:
9636 /* For normal numbers, proceed iff radix == 2. In GCC,
9637 normalized significands are in the range [0.5, 1.0). We
9638 want the exponent as if they were [1.0, 2.0) so get the
9639 exponent and subtract 1. */
9640 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9641 return fold_convert (rettype, build_int_cst (NULL_TREE,
9642 REAL_EXP (value)-1));
9643 break;
9647 return NULL_TREE;
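/* Illustrative sketch of the logb/ilogb constant folds above, assuming a
   radix-2 format: 8.0 is represented as 0.5 * 2**4, so

     logb (8.0)    folds to   3.0
     ilogb (8.0)   folds to   3

   while Inf and NaN arguments are returned unchanged for logb only.  */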
9650 /* Fold a call to builtin significand, if radix == 2. */
9652 static tree
9653 fold_builtin_significand (tree arg, tree rettype)
9655 if (! validate_arg (arg, REAL_TYPE))
9656 return NULL_TREE;
9658 STRIP_NOPS (arg);
9660 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9662 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9664 switch (value->cl)
9666 case rvc_zero:
9667 case rvc_nan:
9668 case rvc_inf:
9669 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9670 return fold_convert (rettype, arg);
9671 case rvc_normal:
9672 /* For normal numbers, proceed iff radix == 2. */
9673 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9675 REAL_VALUE_TYPE result = *value;
9676 /* In GCC, normalized significands are in the range [0.5,
9677 1.0). We want them to be [1.0, 2.0) so set the
9678 exponent to 1. */
9679 SET_REAL_EXP (&result, 1);
9680 return build_real (rettype, result);
9682 break;
9686 return NULL_TREE;
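/* Illustrative sketch of the significand fold above, again assuming radix 2:
   12.0 is 0.75 * 2**4, and forcing the exponent to 1 gives

     significand (12.0)   folds to   1.5

   so the result always lies in [1.0, 2.0) for normal numbers.  */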
9689 /* Fold a call to builtin frexp, we can assume the base is 2. */
9691 static tree
9692 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9694 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9695 return NULL_TREE;
9697 STRIP_NOPS (arg0);
9699 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9700 return NULL_TREE;
9702 arg1 = build_fold_indirect_ref (arg1);
9704 /* Proceed if a valid pointer type was passed in. */
9705 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9707 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9708 tree frac, exp;
9710 switch (value->cl)
9712 case rvc_zero:
9713 /* For +-0, return (*exp = 0, +-0). */
9714 exp = integer_zero_node;
9715 frac = arg0;
9716 break;
9717 case rvc_nan:
9718 case rvc_inf:
9719 /* For +-NaN or +-Inf, *exp is unspecified, so return arg0. */
9720 return omit_one_operand (rettype, arg0, arg1);
9721 case rvc_normal:
9723 /* Since the frexp function always expects base 2, and in
9724 GCC normalized significands are already in the range
9725 [0.5, 1.0), we have exactly what frexp wants. */
9726 REAL_VALUE_TYPE frac_rvt = *value;
9727 SET_REAL_EXP (&frac_rvt, 0);
9728 frac = build_real (rettype, frac_rvt);
9729 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9731 break;
9732 default:
9733 gcc_unreachable ();
9736 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9737 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9738 TREE_SIDE_EFFECTS (arg1) = 1;
9739 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9742 return NULL_TREE;
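/* Illustrative sketch of the frexp fold above (hypothetical int lvalue E):
   6.0 is 0.75 * 2**3, so

     frexp (6.0, &e)   folds to   (e = 3, 0.75)

   i.e. a COMPOUND_EXPR that stores the exponent and yields the fraction.  */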
9745 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9746 then we can assume the base is two. If it's false, then we have to
9747 check the mode of the TYPE parameter in certain cases. */
9749 static tree
9750 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9752 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9754 STRIP_NOPS (arg0);
9755 STRIP_NOPS (arg1);
9757 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9758 if (real_zerop (arg0) || integer_zerop (arg1)
9759 || (TREE_CODE (arg0) == REAL_CST
9760 && !real_isfinite (&TREE_REAL_CST (arg0))))
9761 return omit_one_operand (type, arg0, arg1);
9763 /* If both arguments are constant, then try to evaluate it. */
9764 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9765 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9766 && host_integerp (arg1, 0))
9768 /* Bound the maximum adjustment to twice the range of the
9769 mode's valid exponents. Use abs to ensure the range is
9770 positive as a sanity check. */
9771 const long max_exp_adj = 2 *
9772 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9773 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9775 /* Get the user-requested adjustment. */
9776 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9778 /* The requested adjustment must be inside this range. This
9779 is a preliminary cap to avoid things like overflow; we
9780 may still fail to compute the result for other reasons. */
9781 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9783 REAL_VALUE_TYPE initial_result;
9785 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9787 /* Ensure we didn't overflow. */
9788 if (! real_isinf (&initial_result))
9790 const REAL_VALUE_TYPE trunc_result
9791 = real_value_truncate (TYPE_MODE (type), initial_result);
9793 /* Only proceed if the target mode can hold the
9794 resulting value. */
9795 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9796 return build_real (type, trunc_result);
9802 return NULL_TREE;
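/* Illustrative sketch of the ldexp/scalbn folds above (hypothetical X, N):

     ldexp (x, 0)      folds to   x
     ldexp (0.0, n)    folds to   0.0   (N kept for side effects)
     ldexp (0.75, 4)   folds to   12.0

   For scalbn/scalbln the constant case is attempted only when the type's
   radix is 2.  */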
9805 /* Fold a call to builtin modf. */
9807 static tree
9808 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9810 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9811 return NULL_TREE;
9813 STRIP_NOPS (arg0);
9815 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9816 return NULL_TREE;
9818 arg1 = build_fold_indirect_ref (arg1);
9820 /* Proceed if a valid pointer type was passed in. */
9821 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9823 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9824 REAL_VALUE_TYPE trunc, frac;
9826 switch (value->cl)
9828 case rvc_nan:
9829 case rvc_zero:
9830 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9831 trunc = frac = *value;
9832 break;
9833 case rvc_inf:
9834 /* For +-Inf, return (*arg1 = arg0, +-0). */
9835 frac = dconst0;
9836 frac.sign = value->sign;
9837 trunc = *value;
9838 break;
9839 case rvc_normal:
9840 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9841 real_trunc (&trunc, VOIDmode, value);
9842 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9843 /* If the original number was negative and already
9844 integral, then the fractional part is -0.0. */
9845 if (value->sign && frac.cl == rvc_zero)
9846 frac.sign = value->sign;
9847 break;
9850 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9851 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9852 build_real (rettype, trunc));
9853 TREE_SIDE_EFFECTS (arg1) = 1;
9854 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9855 build_real (rettype, frac));
9858 return NULL_TREE;
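/* Illustrative sketch of the modf fold above (hypothetical lvalue IP of the
   same type as the argument):

     modf (2.5, &ip)    folds to   (ip = 2.0, 0.5)
     modf (-3.0, &ip)   folds to   (ip = -3.0, -0.0)

   matching the -0.0 fractional part noted in the rvc_normal case.  */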
9861 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9862 ARG is the argument for the call; BUILTIN_INDEX selects the check. */
9864 static tree
9865 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9867 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9868 REAL_VALUE_TYPE r;
9870 if (!validate_arg (arg, REAL_TYPE))
9871 return NULL_TREE;
9873 switch (builtin_index)
9875 case BUILT_IN_ISINF:
9876 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9877 return omit_one_operand (type, integer_zero_node, arg);
9879 if (TREE_CODE (arg) == REAL_CST)
9881 r = TREE_REAL_CST (arg);
9882 if (real_isinf (&r))
9883 return real_compare (GT_EXPR, &r, &dconst0)
9884 ? integer_one_node : integer_minus_one_node;
9885 else
9886 return integer_zero_node;
9889 return NULL_TREE;
9891 case BUILT_IN_ISINF_SIGN:
9893 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9894 /* In a boolean context, GCC will fold the inner COND_EXPR to
9895 1. So e.g. "if (isinf_sign(x))" would be folded to just
9896 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9897 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9898 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9899 tree tmp = NULL_TREE;
9901 arg = builtin_save_expr (arg);
9903 if (signbit_fn && isinf_fn)
9905 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9906 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9908 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9909 signbit_call, integer_zero_node);
9910 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9911 isinf_call, integer_zero_node);
9913 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9914 integer_minus_one_node, integer_one_node);
9915 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9916 integer_zero_node);
9919 return tmp;
9922 case BUILT_IN_ISFINITE:
9923 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9924 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9925 return omit_one_operand (type, integer_one_node, arg);
9927 if (TREE_CODE (arg) == REAL_CST)
9929 r = TREE_REAL_CST (arg);
9930 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9933 return NULL_TREE;
9935 case BUILT_IN_ISNAN:
9936 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9937 return omit_one_operand (type, integer_zero_node, arg);
9939 if (TREE_CODE (arg) == REAL_CST)
9941 r = TREE_REAL_CST (arg);
9942 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9945 arg = builtin_save_expr (arg);
9946 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9948 default:
9949 gcc_unreachable ();
9953 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9954 This builtin will generate code to return the appropriate floating
9955 point classification depending on the value of the floating point
9956 number passed in. The possible return values must be supplied as
9957 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9958 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9959 one floating-point argument, which is "type generic". */
9961 static tree
9962 fold_builtin_fpclassify (tree exp)
9964 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9965 arg, type, res, tmp;
9966 enum machine_mode mode;
9967 REAL_VALUE_TYPE r;
9968 char buf[128];
9970 /* Verify the required arguments in the original call. */
9971 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9972 INTEGER_TYPE, INTEGER_TYPE,
9973 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9974 return NULL_TREE;
9976 fp_nan = CALL_EXPR_ARG (exp, 0);
9977 fp_infinite = CALL_EXPR_ARG (exp, 1);
9978 fp_normal = CALL_EXPR_ARG (exp, 2);
9979 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9980 fp_zero = CALL_EXPR_ARG (exp, 4);
9981 arg = CALL_EXPR_ARG (exp, 5);
9982 type = TREE_TYPE (arg);
9983 mode = TYPE_MODE (type);
9984 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9986 /* fpclassify(x) ->
9987 isnan(x) ? FP_NAN :
9988 (fabs(x) == Inf ? FP_INFINITE :
9989 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9990 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9992 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9993 build_real (type, dconst0));
9994 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
9996 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9997 real_from_string (&r, buf);
9998 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9999 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
10001 if (HONOR_INFINITIES (mode))
10003 real_inf (&r);
10004 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10005 build_real (type, r));
10006 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
10009 if (HONOR_NANS (mode))
10011 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
10012 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
10015 return res;
10018 /* Fold a call to an unordered comparison function such as
10019 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10020 being called and ARG0 and ARG1 are the arguments for the call.
10021 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10022 the opposite of the desired result. UNORDERED_CODE is used
10023 for modes that can hold NaNs and ORDERED_CODE is used for
10024 the rest. */
10026 static tree
10027 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
10028 enum tree_code unordered_code,
10029 enum tree_code ordered_code)
10031 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10032 enum tree_code code;
10033 tree type0, type1;
10034 enum tree_code code0, code1;
10035 tree cmp_type = NULL_TREE;
10037 type0 = TREE_TYPE (arg0);
10038 type1 = TREE_TYPE (arg1);
10040 code0 = TREE_CODE (type0);
10041 code1 = TREE_CODE (type1);
10043 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10044 /* Choose the wider of two real types. */
10045 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10046 ? type0 : type1;
10047 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10048 cmp_type = type0;
10049 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10050 cmp_type = type1;
10052 arg0 = fold_convert (cmp_type, arg0);
10053 arg1 = fold_convert (cmp_type, arg1);
10055 if (unordered_code == UNORDERED_EXPR)
10057 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10058 return omit_two_operands (type, integer_zero_node, arg0, arg1);
10059 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
10062 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10063 : ordered_code;
10064 return fold_build1 (TRUTH_NOT_EXPR, type,
10065 fold_build2 (code, type, arg0, arg1));
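/* Illustrative sketch of the unordered-comparison folds above: when the
   operands' mode can hold NaNs,

     isgreater (x, y)     folds to   !(x UNLE y)
     isless (x, y)        folds to   !(x UNGE y)
     isunordered (x, y)   folds to   x UNORD y

   and when it cannot, the corresponding ordered comparison code is used
   inside the negation (or constant 0 for isunordered).  */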
10068 /* Fold a call to built-in function FNDECL with 0 arguments.
10069 IGNORE is true if the result of the function call is ignored. This
10070 function returns NULL_TREE if no simplification was possible. */
10072 static tree
10073 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10075 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10076 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10077 switch (fcode)
10079 CASE_FLT_FN (BUILT_IN_INF):
10080 case BUILT_IN_INFD32:
10081 case BUILT_IN_INFD64:
10082 case BUILT_IN_INFD128:
10083 return fold_builtin_inf (type, true);
10085 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10086 return fold_builtin_inf (type, false);
10088 case BUILT_IN_CLASSIFY_TYPE:
10089 return fold_builtin_classify_type (NULL_TREE);
10091 default:
10092 break;
10094 return NULL_TREE;
10097 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10098 IGNORE is true if the result of the function call is ignored. This
10099 function returns NULL_TREE if no simplification was possible. */
10101 static tree
10102 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
10104 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10105 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10106 switch (fcode)
10109 case BUILT_IN_CONSTANT_P:
10111 tree val = fold_builtin_constant_p (arg0);
10113 /* Gimplification will pull the CALL_EXPR for the builtin out of
10114 an if condition. When not optimizing, we'll not CSE it back.
10115 To avoid link-error-style regressions, return false now. */
10116 if (!val && !optimize)
10117 val = integer_zero_node;
10119 return val;
10122 case BUILT_IN_CLASSIFY_TYPE:
10123 return fold_builtin_classify_type (arg0);
10125 case BUILT_IN_STRLEN:
10126 return fold_builtin_strlen (arg0);
10128 CASE_FLT_FN (BUILT_IN_FABS):
10129 return fold_builtin_fabs (arg0, type);
10131 case BUILT_IN_ABS:
10132 case BUILT_IN_LABS:
10133 case BUILT_IN_LLABS:
10134 case BUILT_IN_IMAXABS:
10135 return fold_builtin_abs (arg0, type);
10137 CASE_FLT_FN (BUILT_IN_CONJ):
10138 if (validate_arg (arg0, COMPLEX_TYPE))
10139 return fold_build1 (CONJ_EXPR, type, arg0);
10140 break;
10142 CASE_FLT_FN (BUILT_IN_CREAL):
10143 if (validate_arg (arg0, COMPLEX_TYPE))
10144 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
10145 break;
10147 CASE_FLT_FN (BUILT_IN_CIMAG):
10148 if (validate_arg (arg0, COMPLEX_TYPE))
10149 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
10150 break;
10152 CASE_FLT_FN (BUILT_IN_CCOS):
10153 CASE_FLT_FN (BUILT_IN_CCOSH):
10154 /* These functions are "even", i.e. f(x) == f(-x). */
10155 if (validate_arg (arg0, COMPLEX_TYPE))
10157 tree narg = fold_strip_sign_ops (arg0);
10158 if (narg)
10159 return build_call_expr (fndecl, 1, narg);
10161 break;
10163 CASE_FLT_FN (BUILT_IN_CABS):
10164 return fold_builtin_cabs (arg0, type, fndecl);
10166 CASE_FLT_FN (BUILT_IN_CARG):
10167 return fold_builtin_carg (arg0, type);
10169 CASE_FLT_FN (BUILT_IN_SQRT):
10170 return fold_builtin_sqrt (arg0, type);
10172 CASE_FLT_FN (BUILT_IN_CBRT):
10173 return fold_builtin_cbrt (arg0, type);
10175 CASE_FLT_FN (BUILT_IN_ASIN):
10176 if (validate_arg (arg0, REAL_TYPE))
10177 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10178 &dconstm1, &dconst1, true);
10179 break;
10181 CASE_FLT_FN (BUILT_IN_ACOS):
10182 if (validate_arg (arg0, REAL_TYPE))
10183 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10184 &dconstm1, &dconst1, true);
10185 break;
10187 CASE_FLT_FN (BUILT_IN_ATAN):
10188 if (validate_arg (arg0, REAL_TYPE))
10189 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10190 break;
10192 CASE_FLT_FN (BUILT_IN_ASINH):
10193 if (validate_arg (arg0, REAL_TYPE))
10194 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10195 break;
10197 CASE_FLT_FN (BUILT_IN_ACOSH):
10198 if (validate_arg (arg0, REAL_TYPE))
10199 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10200 &dconst1, NULL, true);
10201 break;
10203 CASE_FLT_FN (BUILT_IN_ATANH):
10204 if (validate_arg (arg0, REAL_TYPE))
10205 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10206 &dconstm1, &dconst1, false);
10207 break;
10209 CASE_FLT_FN (BUILT_IN_SIN):
10210 if (validate_arg (arg0, REAL_TYPE))
10211 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10212 break;
10214 CASE_FLT_FN (BUILT_IN_COS):
10215 return fold_builtin_cos (arg0, type, fndecl);
10218 CASE_FLT_FN (BUILT_IN_TAN):
10219 return fold_builtin_tan (arg0, type);
10221 CASE_FLT_FN (BUILT_IN_CEXP):
10222 return fold_builtin_cexp (arg0, type);
10224 CASE_FLT_FN (BUILT_IN_CEXPI):
10225 if (validate_arg (arg0, REAL_TYPE))
10226 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10227 break;
10229 CASE_FLT_FN (BUILT_IN_SINH):
10230 if (validate_arg (arg0, REAL_TYPE))
10231 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10232 break;
10234 CASE_FLT_FN (BUILT_IN_COSH):
10235 return fold_builtin_cosh (arg0, type, fndecl);
10237 CASE_FLT_FN (BUILT_IN_TANH):
10238 if (validate_arg (arg0, REAL_TYPE))
10239 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10240 break;
10242 CASE_FLT_FN (BUILT_IN_ERF):
10243 if (validate_arg (arg0, REAL_TYPE))
10244 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10245 break;
10247 CASE_FLT_FN (BUILT_IN_ERFC):
10248 if (validate_arg (arg0, REAL_TYPE))
10249 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10250 break;
10252 CASE_FLT_FN (BUILT_IN_TGAMMA):
10253 if (validate_arg (arg0, REAL_TYPE))
10254 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10255 break;
10257 CASE_FLT_FN (BUILT_IN_EXP):
10258 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10260 CASE_FLT_FN (BUILT_IN_EXP2):
10261 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10263 CASE_FLT_FN (BUILT_IN_EXP10):
10264 CASE_FLT_FN (BUILT_IN_POW10):
10265 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10267 CASE_FLT_FN (BUILT_IN_EXPM1):
10268 if (validate_arg (arg0, REAL_TYPE))
10269 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10270 break;
10272 CASE_FLT_FN (BUILT_IN_LOG):
10273 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10275 CASE_FLT_FN (BUILT_IN_LOG2):
10276 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10278 CASE_FLT_FN (BUILT_IN_LOG10):
10279 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10281 CASE_FLT_FN (BUILT_IN_LOG1P):
10282 if (validate_arg (arg0, REAL_TYPE))
10283 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10284 &dconstm1, NULL, false);
10285 break;
10287 CASE_FLT_FN (BUILT_IN_J0):
10288 if (validate_arg (arg0, REAL_TYPE))
10289 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10290 NULL, NULL, 0);
10291 break;
10293 CASE_FLT_FN (BUILT_IN_J1):
10294 if (validate_arg (arg0, REAL_TYPE))
10295 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10296 NULL, NULL, 0);
10297 break;
10299 CASE_FLT_FN (BUILT_IN_Y0):
10300 if (validate_arg (arg0, REAL_TYPE))
10301 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10302 &dconst0, NULL, false);
10303 break;
10305 CASE_FLT_FN (BUILT_IN_Y1):
10306 if (validate_arg (arg0, REAL_TYPE))
10307 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10308 &dconst0, NULL, false);
10309 break;
10311 CASE_FLT_FN (BUILT_IN_NAN):
10312 case BUILT_IN_NAND32:
10313 case BUILT_IN_NAND64:
10314 case BUILT_IN_NAND128:
10315 return fold_builtin_nan (arg0, type, true);
10317 CASE_FLT_FN (BUILT_IN_NANS):
10318 return fold_builtin_nan (arg0, type, false);
10320 CASE_FLT_FN (BUILT_IN_FLOOR):
10321 return fold_builtin_floor (fndecl, arg0);
10323 CASE_FLT_FN (BUILT_IN_CEIL):
10324 return fold_builtin_ceil (fndecl, arg0);
10326 CASE_FLT_FN (BUILT_IN_TRUNC):
10327 return fold_builtin_trunc (fndecl, arg0);
10329 CASE_FLT_FN (BUILT_IN_ROUND):
10330 return fold_builtin_round (fndecl, arg0);
10332 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10333 CASE_FLT_FN (BUILT_IN_RINT):
10334 return fold_trunc_transparent_mathfn (fndecl, arg0);
10336 CASE_FLT_FN (BUILT_IN_LCEIL):
10337 CASE_FLT_FN (BUILT_IN_LLCEIL):
10338 CASE_FLT_FN (BUILT_IN_LFLOOR):
10339 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10340 CASE_FLT_FN (BUILT_IN_LROUND):
10341 CASE_FLT_FN (BUILT_IN_LLROUND):
10342 return fold_builtin_int_roundingfn (fndecl, arg0);
10344 CASE_FLT_FN (BUILT_IN_LRINT):
10345 CASE_FLT_FN (BUILT_IN_LLRINT):
10346 return fold_fixed_mathfn (fndecl, arg0);
10348 case BUILT_IN_BSWAP32:
10349 case BUILT_IN_BSWAP64:
10350 return fold_builtin_bswap (fndecl, arg0);
10352 CASE_INT_FN (BUILT_IN_FFS):
10353 CASE_INT_FN (BUILT_IN_CLZ):
10354 CASE_INT_FN (BUILT_IN_CTZ):
10355 CASE_INT_FN (BUILT_IN_POPCOUNT):
10356 CASE_INT_FN (BUILT_IN_PARITY):
10357 return fold_builtin_bitop (fndecl, arg0);
10359 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10360 return fold_builtin_signbit (arg0, type);
10362 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10363 return fold_builtin_significand (arg0, type);
10365 CASE_FLT_FN (BUILT_IN_ILOGB):
10366 CASE_FLT_FN (BUILT_IN_LOGB):
10367 return fold_builtin_logb (arg0, type);
10369 case BUILT_IN_ISASCII:
10370 return fold_builtin_isascii (arg0);
10372 case BUILT_IN_TOASCII:
10373 return fold_builtin_toascii (arg0);
10375 case BUILT_IN_ISDIGIT:
10376 return fold_builtin_isdigit (arg0);
10378 CASE_FLT_FN (BUILT_IN_FINITE):
10379 case BUILT_IN_FINITED32:
10380 case BUILT_IN_FINITED64:
10381 case BUILT_IN_FINITED128:
10382 case BUILT_IN_ISFINITE:
10383 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10385 CASE_FLT_FN (BUILT_IN_ISINF):
10386 case BUILT_IN_ISINFD32:
10387 case BUILT_IN_ISINFD64:
10388 case BUILT_IN_ISINFD128:
10389 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10391 case BUILT_IN_ISINF_SIGN:
10392 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10394 CASE_FLT_FN (BUILT_IN_ISNAN):
10395 case BUILT_IN_ISNAND32:
10396 case BUILT_IN_ISNAND64:
10397 case BUILT_IN_ISNAND128:
10398 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10400 case BUILT_IN_PRINTF:
10401 case BUILT_IN_PRINTF_UNLOCKED:
10402 case BUILT_IN_VPRINTF:
10403 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10405 default:
10406 break;
10409 return NULL_TREE;
10413 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10414 IGNORE is true if the result of the function call is ignored. This
10415 function returns NULL_TREE if no simplification was possible. */
10417 static tree
10418 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10420 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10421 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10423 switch (fcode)
10425 CASE_FLT_FN (BUILT_IN_JN):
10426 if (validate_arg (arg0, INTEGER_TYPE)
10427 && validate_arg (arg1, REAL_TYPE))
10428 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10429 break;
10431 CASE_FLT_FN (BUILT_IN_YN):
10432 if (validate_arg (arg0, INTEGER_TYPE)
10433 && validate_arg (arg1, REAL_TYPE))
10434 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10435 &dconst0, false);
10436 break;
10438 CASE_FLT_FN (BUILT_IN_DREM):
10439 CASE_FLT_FN (BUILT_IN_REMAINDER):
10440 if (validate_arg (arg0, REAL_TYPE)
10441 && validate_arg(arg1, REAL_TYPE))
10442 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10443 break;
10445 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10446 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10447 if (validate_arg (arg0, REAL_TYPE)
10448 && validate_arg(arg1, POINTER_TYPE))
10449 return do_mpfr_lgamma_r (arg0, arg1, type);
10450 break;
10452 CASE_FLT_FN (BUILT_IN_ATAN2):
10453 if (validate_arg (arg0, REAL_TYPE)
10454 && validate_arg(arg1, REAL_TYPE))
10455 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10456 break;
10458 CASE_FLT_FN (BUILT_IN_FDIM):
10459 if (validate_arg (arg0, REAL_TYPE)
10460 && validate_arg(arg1, REAL_TYPE))
10461 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10462 break;
10464 CASE_FLT_FN (BUILT_IN_HYPOT):
10465 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10467 CASE_FLT_FN (BUILT_IN_LDEXP):
10468 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10469 CASE_FLT_FN (BUILT_IN_SCALBN):
10470 CASE_FLT_FN (BUILT_IN_SCALBLN):
10471 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10473 CASE_FLT_FN (BUILT_IN_FREXP):
10474 return fold_builtin_frexp (arg0, arg1, type);
10476 CASE_FLT_FN (BUILT_IN_MODF):
10477 return fold_builtin_modf (arg0, arg1, type);
10479 case BUILT_IN_BZERO:
10480 return fold_builtin_bzero (arg0, arg1, ignore);
10482 case BUILT_IN_FPUTS:
10483 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10485 case BUILT_IN_FPUTS_UNLOCKED:
10486 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10488 case BUILT_IN_STRSTR:
10489 return fold_builtin_strstr (arg0, arg1, type);
10491 case BUILT_IN_STRCAT:
10492 return fold_builtin_strcat (arg0, arg1);
10494 case BUILT_IN_STRSPN:
10495 return fold_builtin_strspn (arg0, arg1);
10497 case BUILT_IN_STRCSPN:
10498 return fold_builtin_strcspn (arg0, arg1);
10500 case BUILT_IN_STRCHR:
10501 case BUILT_IN_INDEX:
10502 return fold_builtin_strchr (arg0, arg1, type);
10504 case BUILT_IN_STRRCHR:
10505 case BUILT_IN_RINDEX:
10506 return fold_builtin_strrchr (arg0, arg1, type);
10508 case BUILT_IN_STRCPY:
10509 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10511 case BUILT_IN_STPCPY:
10512 if (ignore)
10514 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10515 if (!fn)
10516 break;
10518 return build_call_expr (fn, 2, arg0, arg1);
10520 break;
10522 case BUILT_IN_STRCMP:
10523 return fold_builtin_strcmp (arg0, arg1);
10525 case BUILT_IN_STRPBRK:
10526 return fold_builtin_strpbrk (arg0, arg1, type);
10528 case BUILT_IN_EXPECT:
10529 return fold_builtin_expect (arg0, arg1);
10531 CASE_FLT_FN (BUILT_IN_POW):
10532 return fold_builtin_pow (fndecl, arg0, arg1, type);
10534 CASE_FLT_FN (BUILT_IN_POWI):
10535 return fold_builtin_powi (fndecl, arg0, arg1, type);
10537 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10538 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10540 CASE_FLT_FN (BUILT_IN_FMIN):
10541 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10543 CASE_FLT_FN (BUILT_IN_FMAX):
10544 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10546 case BUILT_IN_ISGREATER:
10547 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10548 case BUILT_IN_ISGREATEREQUAL:
10549 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10550 case BUILT_IN_ISLESS:
10551 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10552 case BUILT_IN_ISLESSEQUAL:
10553 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10554 case BUILT_IN_ISLESSGREATER:
10555 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10556 case BUILT_IN_ISUNORDERED:
10557 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10558 NOP_EXPR);
10560 /* We do the folding for va_start in the expander. */
10561 case BUILT_IN_VA_START:
10562 break;
10564 case BUILT_IN_SPRINTF:
10565 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10567 case BUILT_IN_OBJECT_SIZE:
10568 return fold_builtin_object_size (arg0, arg1);
10570 case BUILT_IN_PRINTF:
10571 case BUILT_IN_PRINTF_UNLOCKED:
10572 case BUILT_IN_VPRINTF:
10573 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10575 case BUILT_IN_PRINTF_CHK:
10576 case BUILT_IN_VPRINTF_CHK:
10577 if (!validate_arg (arg0, INTEGER_TYPE)
10578 || TREE_SIDE_EFFECTS (arg0))
10579 return NULL_TREE;
10580 else
10581 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10582 break;
10584 case BUILT_IN_FPRINTF:
10585 case BUILT_IN_FPRINTF_UNLOCKED:
10586 case BUILT_IN_VFPRINTF:
10587 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10588 ignore, fcode);
10590 default:
10591 break;
10593 return NULL_TREE;
10596 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10597 and ARG2. IGNORE is true if the result of the function call is ignored.
10598 This function returns NULL_TREE if no simplification was possible. */
10600 static tree
10601 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10603 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10604 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10605 switch (fcode)
10608 CASE_FLT_FN (BUILT_IN_SINCOS):
10609 return fold_builtin_sincos (arg0, arg1, arg2);
10611 CASE_FLT_FN (BUILT_IN_FMA):
10612 if (validate_arg (arg0, REAL_TYPE)
10613 && validate_arg(arg1, REAL_TYPE)
10614 && validate_arg(arg2, REAL_TYPE))
10615 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10616 break;
10618 CASE_FLT_FN (BUILT_IN_REMQUO):
10619 if (validate_arg (arg0, REAL_TYPE)
10620 && validate_arg(arg1, REAL_TYPE)
10621 && validate_arg(arg2, POINTER_TYPE))
10622 return do_mpfr_remquo (arg0, arg1, arg2);
10623 break;
10625 case BUILT_IN_MEMSET:
10626 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10628 case BUILT_IN_BCOPY:
10629 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10631 case BUILT_IN_MEMCPY:
10632 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10634 case BUILT_IN_MEMPCPY:
10635 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10637 case BUILT_IN_MEMMOVE:
10638 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10640 case BUILT_IN_STRNCAT:
10641 return fold_builtin_strncat (arg0, arg1, arg2);
10643 case BUILT_IN_STRNCPY:
10644 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10646 case BUILT_IN_STRNCMP:
10647 return fold_builtin_strncmp (arg0, arg1, arg2);
10649 case BUILT_IN_MEMCHR:
10650 return fold_builtin_memchr (arg0, arg1, arg2, type);
10652 case BUILT_IN_BCMP:
10653 case BUILT_IN_MEMCMP:
10654 return fold_builtin_memcmp (arg0, arg1, arg2);
10656 case BUILT_IN_SPRINTF:
10657 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10659 case BUILT_IN_STRCPY_CHK:
10660 case BUILT_IN_STPCPY_CHK:
10661 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10662 ignore, fcode);
10664 case BUILT_IN_STRCAT_CHK:
10665 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10667 case BUILT_IN_PRINTF_CHK:
10668 case BUILT_IN_VPRINTF_CHK:
10669 if (!validate_arg (arg0, INTEGER_TYPE)
10670 || TREE_SIDE_EFFECTS (arg0))
10671 return NULL_TREE;
10672 else
10673 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10674 break;
10676 case BUILT_IN_FPRINTF:
10677 case BUILT_IN_FPRINTF_UNLOCKED:
10678 case BUILT_IN_VFPRINTF:
10679 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10681 case BUILT_IN_FPRINTF_CHK:
10682 case BUILT_IN_VFPRINTF_CHK:
10683 if (!validate_arg (arg1, INTEGER_TYPE)
10684 || TREE_SIDE_EFFECTS (arg1))
10685 return NULL_TREE;
10686 else
10687 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10688 ignore, fcode);
10690 default:
10691 break;
10693 return NULL_TREE;
10696 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10697 ARG2, and ARG3. IGNORE is true if the result of the function call is
10698 ignored. This function returns NULL_TREE if no simplification was
10699 possible. */
10701 static tree
10702 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10703 bool ignore)
10705 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10707 switch (fcode)
10709 case BUILT_IN_MEMCPY_CHK:
10710 case BUILT_IN_MEMPCPY_CHK:
10711 case BUILT_IN_MEMMOVE_CHK:
10712 case BUILT_IN_MEMSET_CHK:
10713 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10714 NULL_TREE, ignore,
10715 DECL_FUNCTION_CODE (fndecl));
10717 case BUILT_IN_STRNCPY_CHK:
10718 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10720 case BUILT_IN_STRNCAT_CHK:
10721 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10723 case BUILT_IN_FPRINTF_CHK:
10724 case BUILT_IN_VFPRINTF_CHK:
10725 if (!validate_arg (arg1, INTEGER_TYPE)
10726 || TREE_SIDE_EFFECTS (arg1))
10727 return NULL_TREE;
10728 else
10729 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10730 ignore, fcode);
10731 break;
10733 default:
10734 break;
10736 return NULL_TREE;
10739 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10740 arguments, where NARGS <= 4. IGNORE is true if the result of the
10741 function call is ignored. This function returns NULL_TREE if no
10742 simplification was possible. Note that this only folds builtins with
10743 fixed argument patterns. Foldings that do varargs-to-varargs
10744 transformations, or that match calls with more than 4 arguments,
10745 need to be handled with fold_builtin_varargs instead. */
10747 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10749 static tree
10750 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10752 tree ret = NULL_TREE;
10754 switch (nargs)
10756 case 0:
10757 ret = fold_builtin_0 (fndecl, ignore);
10758 break;
10759 case 1:
10760 ret = fold_builtin_1 (fndecl, args[0], ignore);
10761 break;
10762 case 2:
10763 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10764 break;
10765 case 3:
10766 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10767 break;
10768 case 4:
10769 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10770 ignore);
10771 break;
10772 default:
10773 break;
10775 if (ret)
10777 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10778 TREE_NO_WARNING (ret) = 1;
10779 return ret;
10781 return NULL_TREE;
10784 /* Builtins with folding operations that operate on "..." arguments
10785 need special handling; we need to store the arguments in a convenient
10786 data structure before attempting any folding. Fortunately there are
10787 only a few builtins that fall into this category. FNDECL is the
10788 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10789 result of the function call is ignored. */
10791 static tree
10792 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10794 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10795 tree ret = NULL_TREE;
10797 switch (fcode)
10799 case BUILT_IN_SPRINTF_CHK:
10800 case BUILT_IN_VSPRINTF_CHK:
10801 ret = fold_builtin_sprintf_chk (exp, fcode);
10802 break;
10804 case BUILT_IN_SNPRINTF_CHK:
10805 case BUILT_IN_VSNPRINTF_CHK:
10806 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10807 break;
10809 case BUILT_IN_FPCLASSIFY:
10810 ret = fold_builtin_fpclassify (exp);
10811 break;
10813 default:
10814 break;
10816 if (ret)
10818 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10819 TREE_NO_WARNING (ret) = 1;
10820 return ret;
10822 return NULL_TREE;
10825 /* Return true if FNDECL shouldn't be folded right now.
10826 If a built-in function has an always_inline wrapper, defer
10827 folding it until after always_inline functions have been
10828 inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10829 might not be performed. */
10831 static bool
10832 avoid_folding_inline_builtin (tree fndecl)
10834 return (DECL_DECLARED_INLINE_P (fndecl)
10835 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10836 && cfun
10837 && !cfun->always_inline_functions_inlined
10838 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10841 /* A wrapper function for builtin folding that prevents warnings for
10842 "statement without effect" and the like, caused by removing the
10843 call node earlier than the warning is generated. */
10845 tree
10846 fold_call_expr (tree exp, bool ignore)
10848 tree ret = NULL_TREE;
10849 tree fndecl = get_callee_fndecl (exp);
10850 if (fndecl
10851 && TREE_CODE (fndecl) == FUNCTION_DECL
10852 && DECL_BUILT_IN (fndecl)
10853 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10854 yet. Defer folding until we see all the arguments
10855 (after inlining). */
10856 && !CALL_EXPR_VA_ARG_PACK (exp))
10858 int nargs = call_expr_nargs (exp);
10860 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10861 instead the last argument is __builtin_va_arg_pack (). Defer folding
10862 even in that case, until the arguments are finalized. */
10863 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10865 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10866 if (fndecl2
10867 && TREE_CODE (fndecl2) == FUNCTION_DECL
10868 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10869 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10870 return NULL_TREE;
10873 if (avoid_folding_inline_builtin (fndecl))
10874 return NULL_TREE;
10876 /* FIXME: Don't use a list in this interface. */
10877 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10878 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10879 else
10881 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10883 tree *args = CALL_EXPR_ARGP (exp);
10884 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10886 if (!ret)
10887 ret = fold_builtin_varargs (fndecl, exp, ignore);
10888 if (ret)
10890 /* Propagate location information from original call to
10891 expansion of builtin. Otherwise things like
10892 maybe_emit_chk_warning, that operate on the expansion
10893 of a builtin, will use the wrong location information. */
10894 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10896 tree realret = ret;
10897 if (TREE_CODE (ret) == NOP_EXPR)
10898 realret = TREE_OPERAND (ret, 0);
10899 if (CAN_HAVE_LOCATION_P (realret)
10900 && !EXPR_HAS_LOCATION (realret))
10901 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10902 return realret;
10904 return ret;
10908 return NULL_TREE;
10911 /* Conveniently construct a function call expression. FNDECL names the
10912 function to be called and ARGLIST is a TREE_LIST of arguments. */
10914 tree
10915 build_function_call_expr (tree fndecl, tree arglist)
10917 tree fntype = TREE_TYPE (fndecl);
10918 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10919 int n = list_length (arglist);
10920 tree *argarray = (tree *) alloca (n * sizeof (tree));
10921 int i;
10923 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10924 argarray[i] = TREE_VALUE (arglist);
10925 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10928 /* Conveniently construct a function call expression. FNDECL names the
10929 function to be called, N is the number of arguments, and the "..."
10930 parameters are the argument expressions. */
10932 tree
10933 build_call_expr (tree fndecl, int n, ...)
10935 va_list ap;
10936 tree fntype = TREE_TYPE (fndecl);
10937 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10938 tree *argarray = (tree *) alloca (n * sizeof (tree));
10939 int i;
10941 va_start (ap, n);
10942 for (i = 0; i < n; i++)
10943 argarray[i] = va_arg (ap, tree);
10944 va_end (ap);
10945 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
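/* A sketch of typical usage, mirroring the string folds earlier in this
   file (DEST, SRC and LEN are hypothetical trees):

     tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
     tree call = build_call_expr (fn, 3, dest, src, len);

   Because the result goes through fold_builtin_call_array, the freshly
   built call is itself immediately subject to folding.  */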
10948 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10949 N arguments are passed in the array ARGARRAY. */
10951 tree
10952 fold_builtin_call_array (tree type,
10953 tree fn,
10954 int n,
10955 tree *argarray)
10957 tree ret = NULL_TREE;
10958 int i;
10959 tree exp;
10961 if (TREE_CODE (fn) == ADDR_EXPR)
10963 tree fndecl = TREE_OPERAND (fn, 0);
10964 if (TREE_CODE (fndecl) == FUNCTION_DECL
10965 && DECL_BUILT_IN (fndecl))
10967 /* If last argument is __builtin_va_arg_pack (), arguments to this
10968 function are not finalized yet. Defer folding until they are. */
10969 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10971 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10972 if (fndecl2
10973 && TREE_CODE (fndecl2) == FUNCTION_DECL
10974 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10975 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10976 return build_call_array (type, fn, n, argarray);
10978 if (avoid_folding_inline_builtin (fndecl))
10979 return build_call_array (type, fn, n, argarray);
10980 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10982 tree arglist = NULL_TREE;
10983 for (i = n - 1; i >= 0; i--)
10984 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10985 ret = targetm.fold_builtin (fndecl, arglist, false);
10986 if (ret)
10987 return ret;
10988 return build_call_array (type, fn, n, argarray);
10990 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10992 /* First try the transformations that don't require consing up
10993 an exp. */
10994 ret = fold_builtin_n (fndecl, argarray, n, false);
10995 if (ret)
10996 return ret;
10999 /* If we got this far, we need to build an exp. */
11000 exp = build_call_array (type, fn, n, argarray);
11001 ret = fold_builtin_varargs (fndecl, exp, false);
11002 return ret ? ret : exp;
11006 return build_call_array (type, fn, n, argarray);
11009 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11010 along with N new arguments specified as the "..." parameters. SKIP
11011 is the number of arguments in EXP to be omitted. This function is used
11012 to do varargs-to-varargs transformations. */
11014 static tree
11015 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
11017 int oldnargs = call_expr_nargs (exp);
11018 int nargs = oldnargs - skip + n;
11019 tree fntype = TREE_TYPE (fndecl);
11020 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11021 tree *buffer;
11023 if (n > 0)
11025 int i, j;
11026 va_list ap;
11028 buffer = XALLOCAVEC (tree, nargs);
11029 va_start (ap, n);
11030 for (i = 0; i < n; i++)
11031 buffer[i] = va_arg (ap, tree);
11032 va_end (ap);
11033 for (j = skip; j < oldnargs; j++, i++)
11034 buffer[i] = CALL_EXPR_ARG (exp, j);
11036 else
11037 buffer = CALL_EXPR_ARGP (exp) + skip;
11039 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
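/* Illustrative usage: fold_builtin_sprintf_chk below turns
   __sprintf_chk (dest, flag, size, fmt, ...) into sprintf (dest, fmt, ...)
   by skipping the four leading arguments and supplying two new ones:

     rewrite_call_expr (exp, 4, fn, 2, dest, fmt);

   any trailing "..." arguments of EXP are carried over unchanged.  */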
11042 /* Validate a single argument ARG against a tree code CODE representing
11043 a type. */
11045 static bool
11046 validate_arg (const_tree arg, enum tree_code code)
11048 if (!arg)
11049 return false;
11050 else if (code == POINTER_TYPE)
11051 return POINTER_TYPE_P (TREE_TYPE (arg));
11052 else if (code == INTEGER_TYPE)
11053 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11054 return code == TREE_CODE (TREE_TYPE (arg));
11057 /* This function validates the types of a function call argument list
11058 against a specified list of tree_codes. If the last specifier is a 0,
11059 that represents an ellipsis; otherwise the last specifier must be a
11060 VOID_TYPE.
11062 This is the GIMPLE version of validate_arglist. Eventually we want to
11063 completely convert builtins.c to work from GIMPLEs and the tree based
11064 validate_arglist will then be removed. */
11066 bool
11067 validate_gimple_arglist (const_gimple call, ...)
11069 enum tree_code code;
11070 bool res = 0;
11071 va_list ap;
11072 const_tree arg;
11073 size_t i;
11075 va_start (ap, call);
11076 i = 0;
11080 code = va_arg (ap, enum tree_code);
11081 switch (code)
11083 case 0:
11084 /* This signifies an ellipsis; any further arguments are all ok. */
11085 res = true;
11086 goto end;
11087 case VOID_TYPE:
11088 /* This signifies an endlink, if no arguments remain, return
11089 true, otherwise return false. */
11090 res = (i == gimple_call_num_args (call));
11091 goto end;
11092 default:
11093 /* If no parameters remain or the parameter's code does not
11094 match the specified code, return false. Otherwise continue
11095 checking any remaining arguments. */
11096 arg = gimple_call_arg (call, i++);
11097 if (!validate_arg (arg, code))
11098 goto end;
11099 break;
11102 while (1);
11104 /* We need gotos here since we can only have one VA_CLOSE in a
11105 function. */
11106 end: ;
11107 va_end (ap);
11109 return res;
11112 /* This function validates the types of a function call argument list
11113 against a specified list of tree_codes. If the last specifier is a 0,
11114 that represents an ellipsis; otherwise the last specifier must be a
11115 VOID_TYPE. */
11117 bool
11118 validate_arglist (const_tree callexpr, ...)
11120 enum tree_code code;
11121 bool res = 0;
11122 va_list ap;
11123 const_call_expr_arg_iterator iter;
11124 const_tree arg;
11126 va_start (ap, callexpr);
11127 init_const_call_expr_arg_iterator (callexpr, &iter);
11131 code = va_arg (ap, enum tree_code);
11132 switch (code)
11134 case 0:
11135 /* This signifies an ellipsis; any further arguments are all ok. */
11136 res = true;
11137 goto end;
11138 case VOID_TYPE:
11139 /* This signifies an endlink, if no arguments remain, return
11140 true, otherwise return false. */
11141 res = !more_const_call_expr_args_p (&iter);
11142 goto end;
11143 default:
11144 /* If no parameters remain or the parameter's code does not
11145 match the specified code, return false. Otherwise continue
11146 checking any remaining arguments. */
11147 arg = next_const_call_expr_arg (&iter);
11148 if (!validate_arg (arg, code))
11149 goto end;
11150 break;
11153 while (1);
11155 /* We need gotos here since we can only have one VA_CLOSE in a
11156 function. */
11157 end: ;
11158 va_end (ap);
11160 return res;
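/* Illustrative usage: expand_builtin_object_size below checks its fixed
   argument list with

     validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   while a builtin taking a pointer plus a variable argument list would end
   the specifier list with 0 instead of VOID_TYPE to accept any further
   arguments.  */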
11163 /* Default target-specific builtin expander that does nothing. */
11166 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11167 rtx target ATTRIBUTE_UNUSED,
11168 rtx subtarget ATTRIBUTE_UNUSED,
11169 enum machine_mode mode ATTRIBUTE_UNUSED,
11170 int ignore ATTRIBUTE_UNUSED)
11172 return NULL_RTX;
11175 /* Returns true if EXP represents data that would potentially reside
11176 in a readonly section. */
11178 static bool
11179 readonly_data_expr (tree exp)
11181 STRIP_NOPS (exp);
11183 if (TREE_CODE (exp) != ADDR_EXPR)
11184 return false;
11186 exp = get_base_address (TREE_OPERAND (exp, 0));
11187 if (!exp)
11188 return false;
11190 /* Make sure we call decl_readonly_section only for trees it
11191 can handle (since it returns true for everything it doesn't
11192 understand). */
11193 if (TREE_CODE (exp) == STRING_CST
11194 || TREE_CODE (exp) == CONSTRUCTOR
11195 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11196 return decl_readonly_section (exp, 0);
11197 else
11198 return false;
11201 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11202 to the call, and TYPE is its return type.
11204 Return NULL_TREE if no simplification was possible, otherwise return the
11205 simplified form of the call as a tree.
11207 The simplified form may be a constant or other expression which
11208 computes the same value, but in a more efficient manner (including
11209 calls to other builtin functions).
11211 The call may contain arguments which need to be evaluated, but
11212 which are not useful to determine the result of the call. In
11213 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11214 COMPOUND_EXPR will be an argument which must be evaluated.
11215 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11216 COMPOUND_EXPR in the chain will contain the tree for the simplified
11217 form of the builtin function call. */
11219 static tree
11220 fold_builtin_strstr (tree s1, tree s2, tree type)
11222 if (!validate_arg (s1, POINTER_TYPE)
11223 || !validate_arg (s2, POINTER_TYPE))
11224 return NULL_TREE;
11225 else
11227 tree fn;
11228 const char *p1, *p2;
11230 p2 = c_getstr (s2);
11231 if (p2 == NULL)
11232 return NULL_TREE;
11234 p1 = c_getstr (s1);
11235 if (p1 != NULL)
11237 const char *r = strstr (p1, p2);
11238 tree tem;
11240 if (r == NULL)
11241 return build_int_cst (TREE_TYPE (s1), 0);
11243 /* Return an offset into the constant string argument. */
11244 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11245 s1, size_int (r - p1));
11246 return fold_convert (type, tem);
11249 /* The argument is const char *, and the result is char *, so we need
11250 a type conversion here to avoid a warning. */
11251 if (p2[0] == '\0')
11252 return fold_convert (type, s1);
11254 if (p2[1] != '\0')
11255 return NULL_TREE;
11257 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11258 if (!fn)
11259 return NULL_TREE;
11261 /* New argument list transforming strstr(s1, s2) to
11262 strchr(s1, s2[0]). */
11263 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
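/* Illustrative folds performed above (assuming the strchr builtin decl is
   available):

     strstr (s, "")         ->  (char *) s
     strstr ("hello", "ll") ->  "hello" + 2
     strstr (s, "l")        ->  strchr (s, 'l')

   A needle of two or more characters with a non-constant haystack is left
   as a real call to strstr.  */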
11267 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11268 the call, and TYPE is its return type.
11270 Return NULL_TREE if no simplification was possible, otherwise return the
11271 simplified form of the call as a tree.
11273 The simplified form may be a constant or other expression which
11274 computes the same value, but in a more efficient manner (including
11275 calls to other builtin functions).
11277 The call may contain arguments which need to be evaluated, but
11278 which are not useful to determine the result of the call. In
11279 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11280 COMPOUND_EXPR will be an argument which must be evaluated.
11281 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11282 COMPOUND_EXPR in the chain will contain the tree for the simplified
11283 form of the builtin function call. */
11285 static tree
11286 fold_builtin_strchr (tree s1, tree s2, tree type)
11288 if (!validate_arg (s1, POINTER_TYPE)
11289 || !validate_arg (s2, INTEGER_TYPE))
11290 return NULL_TREE;
11291 else
11293 const char *p1;
11295 if (TREE_CODE (s2) != INTEGER_CST)
11296 return NULL_TREE;
11298 p1 = c_getstr (s1);
11299 if (p1 != NULL)
11301 char c;
11302 const char *r;
11303 tree tem;
11305 if (target_char_cast (s2, &c))
11306 return NULL_TREE;
11308 r = strchr (p1, c);
11310 if (r == NULL)
11311 return build_int_cst (TREE_TYPE (s1), 0);
11313 /* Return an offset into the constant string argument. */
11314 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11315 s1, size_int (r - p1));
11316 return fold_convert (type, tem);
11318 return NULL_TREE;
11322 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11323 the call, and TYPE is its return type.
11325 Return NULL_TREE if no simplification was possible, otherwise return the
11326 simplified form of the call as a tree.
11328 The simplified form may be a constant or other expression which
11329 computes the same value, but in a more efficient manner (including
11330 calls to other builtin functions).
11332 The call may contain arguments which need to be evaluated, but
11333 which are not useful to determine the result of the call. In
11334 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11335 COMPOUND_EXPR will be an argument which must be evaluated.
11336 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11337 COMPOUND_EXPR in the chain will contain the tree for the simplified
11338 form of the builtin function call. */
11340 static tree
11341 fold_builtin_strrchr (tree s1, tree s2, tree type)
11343 if (!validate_arg (s1, POINTER_TYPE)
11344 || !validate_arg (s2, INTEGER_TYPE))
11345 return NULL_TREE;
11346 else
11348 tree fn;
11349 const char *p1;
11351 if (TREE_CODE (s2) != INTEGER_CST)
11352 return NULL_TREE;
11354 p1 = c_getstr (s1);
11355 if (p1 != NULL)
11357 char c;
11358 const char *r;
11359 tree tem;
11361 if (target_char_cast (s2, &c))
11362 return NULL_TREE;
11364 r = strrchr (p1, c);
11366 if (r == NULL)
11367 return build_int_cst (TREE_TYPE (s1), 0);
11369 /* Return an offset into the constant string argument. */
11370 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11371 s1, size_int (r - p1));
11372 return fold_convert (type, tem);
11375 if (! integer_zerop (s2))
11376 return NULL_TREE;
11378 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11379 if (!fn)
11380 return NULL_TREE;
11382 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11383 return build_call_expr (fn, 2, s1, s2);
11387 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11388 to the call, and TYPE is its return type.
11390 Return NULL_TREE if no simplification was possible, otherwise return the
11391 simplified form of the call as a tree.
11393 The simplified form may be a constant or other expression which
11394 computes the same value, but in a more efficient manner (including
11395 calls to other builtin functions).
11397 The call may contain arguments which need to be evaluated, but
11398 which are not useful to determine the result of the call. In
11399 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11400 COMPOUND_EXPR will be an argument which must be evaluated.
11401 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11402 COMPOUND_EXPR in the chain will contain the tree for the simplified
11403 form of the builtin function call. */
11405 static tree
11406 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11408 if (!validate_arg (s1, POINTER_TYPE)
11409 || !validate_arg (s2, POINTER_TYPE))
11410 return NULL_TREE;
11411 else
11413 tree fn;
11414 const char *p1, *p2;
11416 p2 = c_getstr (s2);
11417 if (p2 == NULL)
11418 return NULL_TREE;
11420 p1 = c_getstr (s1);
11421 if (p1 != NULL)
11423 const char *r = strpbrk (p1, p2);
11424 tree tem;
11426 if (r == NULL)
11427 return build_int_cst (TREE_TYPE (s1), 0);
11429 /* Return an offset into the constant string argument. */
11430 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11431 s1, size_int (r - p1));
11432 return fold_convert (type, tem);
11435 if (p2[0] == '\0')
11436 /* strpbrk(x, "") == NULL.
11437 Evaluate and ignore s1 in case it had side-effects. */
11438 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11440 if (p2[1] != '\0')
11441 return NULL_TREE; /* Really call strpbrk. */
11443 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11444 if (!fn)
11445 return NULL_TREE;
11447 /* New argument list transforming strpbrk(s1, s2) to
11448 strchr(s1, s2[0]). */
11449 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11453 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11454 to the call.
11456 Return NULL_TREE if no simplification was possible, otherwise return the
11457 simplified form of the call as a tree.
11459 The simplified form may be a constant or other expression which
11460 computes the same value, but in a more efficient manner (including
11461 calls to other builtin functions).
11463 The call may contain arguments which need to be evaluated, but
11464 which are not useful to determine the result of the call. In
11465 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11466 COMPOUND_EXPR will be an argument which must be evaluated.
11467 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11468 COMPOUND_EXPR in the chain will contain the tree for the simplified
11469 form of the builtin function call. */
11471 static tree
11472 fold_builtin_strcat (tree dst, tree src)
11474 if (!validate_arg (dst, POINTER_TYPE)
11475 || !validate_arg (src, POINTER_TYPE))
11476 return NULL_TREE;
11477 else
11479 const char *p = c_getstr (src);
11481 /* If the string length is zero, return the dst parameter. */
11482 if (p && *p == '\0')
11483 return dst;
11485 return NULL_TREE;
11489 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11490 arguments to the call.
11492 Return NULL_TREE if no simplification was possible, otherwise return the
11493 simplified form of the call as a tree.
11495 The simplified form may be a constant or other expression which
11496 computes the same value, but in a more efficient manner (including
11497 calls to other builtin functions).
11499 The call may contain arguments which need to be evaluated, but
11500 which are not useful to determine the result of the call. In
11501 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11502 COMPOUND_EXPR will be an argument which must be evaluated.
11503 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11504 COMPOUND_EXPR in the chain will contain the tree for the simplified
11505 form of the builtin function call. */
11507 static tree
11508 fold_builtin_strncat (tree dst, tree src, tree len)
11510 if (!validate_arg (dst, POINTER_TYPE)
11511 || !validate_arg (src, POINTER_TYPE)
11512 || !validate_arg (len, INTEGER_TYPE))
11513 return NULL_TREE;
11514 else
11516 const char *p = c_getstr (src);
11518 /* If the requested length is zero, or the src parameter string
11519 length is zero, return the dst parameter. */
11520 if (integer_zerop (len) || (p && *p == '\0'))
11521 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11523 /* If the requested len is greater than or equal to the string
11524 length, call strcat. */
11525 if (TREE_CODE (len) == INTEGER_CST && p
11526 && compare_tree_int (len, strlen (p)) >= 0)
11528 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11530 /* If the replacement _DECL isn't initialized, don't do the
11531 transformation. */
11532 if (!fn)
11533 return NULL_TREE;
11535 return build_call_expr (fn, 2, dst, src);
11537 return NULL_TREE;
11541 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11542 to the call.
11544 Return NULL_TREE if no simplification was possible, otherwise return the
11545 simplified form of the call as a tree.
11547 The simplified form may be a constant or other expression which
11548 computes the same value, but in a more efficient manner (including
11549 calls to other builtin functions).
11551 The call may contain arguments which need to be evaluated, but
11552 which are not useful to determine the result of the call. In
11553 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11554 COMPOUND_EXPR will be an argument which must be evaluated.
11555 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11556 COMPOUND_EXPR in the chain will contain the tree for the simplified
11557 form of the builtin function call. */
11559 static tree
11560 fold_builtin_strspn (tree s1, tree s2)
11562 if (!validate_arg (s1, POINTER_TYPE)
11563 || !validate_arg (s2, POINTER_TYPE))
11564 return NULL_TREE;
11565 else
11567 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11569 /* If both arguments are constants, evaluate at compile-time. */
11570 if (p1 && p2)
11572 const size_t r = strspn (p1, p2);
11573 return size_int (r);
11576 /* If either argument is "", strspn returns 0. */
11577 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11578 /* Evaluate and ignore both arguments in case either one has
11579 side-effects. */
11580 return omit_two_operands (size_type_node, size_zero_node,
11581 s1, s2);
11582 return NULL_TREE;
11586 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11587 to the call.
11589 Return NULL_TREE if no simplification was possible, otherwise return the
11590 simplified form of the call as a tree.
11592 The simplified form may be a constant or other expression which
11593 computes the same value, but in a more efficient manner (including
11594 calls to other builtin functions).
11596 The call may contain arguments which need to be evaluated, but
11597 which are not useful to determine the result of the call. In
11598 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11599 COMPOUND_EXPR will be an argument which must be evaluated.
11600 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11601 COMPOUND_EXPR in the chain will contain the tree for the simplified
11602 form of the builtin function call. */
11604 static tree
11605 fold_builtin_strcspn (tree s1, tree s2)
11607 if (!validate_arg (s1, POINTER_TYPE)
11608 || !validate_arg (s2, POINTER_TYPE))
11609 return NULL_TREE;
11610 else
11612 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11614 /* If both arguments are constants, evaluate at compile-time. */
11615 if (p1 && p2)
11617 const size_t r = strcspn (p1, p2);
11618 return size_int (r);
11621 /* If the first argument is "", strcspn returns 0. */
11622 if (p1 && *p1 == '\0')
11624 /* Evaluate and ignore argument s2 in case it has
11625 side-effects. */
11626 return omit_one_operand (size_type_node,
11627 size_zero_node, s2);
11630 /* If the second argument is "", return __builtin_strlen(s1). */
11631 if (p2 && *p2 == '\0')
11633 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11635 /* If the replacement _DECL isn't initialized, don't do the
11636 transformation. */
11637 if (!fn)
11638 return NULL_TREE;
11640 return build_call_expr (fn, 1, s1);
11642 return NULL_TREE;
11646 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11647 to the call. IGNORE is true if the value returned
11648 by the builtin will be ignored. UNLOCKED is true is true if this
11649 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11650 the known length of the string. Return NULL_TREE if no simplification
11651 was possible. */
11653 tree
11654 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11656 /* If we're using an unlocked function, assume the other unlocked
11657 functions exist explicitly. */
11658 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11659 : implicit_built_in_decls[BUILT_IN_FPUTC];
11660 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11661 : implicit_built_in_decls[BUILT_IN_FWRITE];
11663 /* If the return value is used, don't do the transformation. */
11664 if (!ignore)
11665 return NULL_TREE;
11667 /* Verify the arguments in the original call. */
11668 if (!validate_arg (arg0, POINTER_TYPE)
11669 || !validate_arg (arg1, POINTER_TYPE))
11670 return NULL_TREE;
11672 if (! len)
11673 len = c_strlen (arg0, 0);
11675 /* Get the length of the string passed to fputs. If the length
11676 can't be determined, punt. */
11677 if (!len
11678 || TREE_CODE (len) != INTEGER_CST)
11679 return NULL_TREE;
11681 switch (compare_tree_int (len, 1))
11683 case -1: /* length is 0, delete the call entirely. */
11684 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11686 case 0: /* length is 1, call fputc. */
11688 const char *p = c_getstr (arg0);
11690 if (p != NULL)
11692 if (fn_fputc)
11693 return build_call_expr (fn_fputc, 2,
11694 build_int_cst (NULL_TREE, p[0]), arg1);
11695 else
11696 return NULL_TREE;
11699 /* FALLTHROUGH */
11700 case 1: /* length is greater than 1, call fwrite. */
11702 /* If optimizing for size, keep fputs. */
11703 if (optimize_function_for_size_p (cfun))
11704 return NULL_TREE;
11705 /* New argument list transforming fputs(string, stream) to
11706 fwrite(string, 1, len, stream). */
11707 if (fn_fwrite)
11708 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11709 else
11710 return NULL_TREE;
11712 default:
11713 gcc_unreachable ();
11715 return NULL_TREE;
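/* Illustrative folds performed above when the return value is ignored
   (assuming the fputc/fwrite builtin decls are available):

     fputs ("", f)    ->  call deleted; F is still evaluated
     fputs ("x", f)   ->  fputc ('x', f)
     fputs ("abc", f) ->  fwrite ("abc", 1, 3, f)   (not when optimizing
                                                     for size)

   If the string length cannot be determined, the call is left alone.  */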
11718 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11719 produced, false otherwise. This is done so that we don't output the error
11720 or warning twice or three times. */
11722 bool
11723 fold_builtin_next_arg (tree exp, bool va_start_p)
11725 tree fntype = TREE_TYPE (current_function_decl);
11726 int nargs = call_expr_nargs (exp);
11727 tree arg;
11729 if (TYPE_ARG_TYPES (fntype) == 0
11730 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11731 == void_type_node))
11733 error ("%<va_start%> used in function with fixed args");
11734 return true;
11737 if (va_start_p)
11739 if (va_start_p && (nargs != 2))
11741 error ("wrong number of arguments to function %<va_start%>");
11742 return true;
11744 arg = CALL_EXPR_ARG (exp, 1);
11746 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11747 when we checked the arguments and if needed issued a warning. */
11748 else
11750 if (nargs == 0)
11752 /* Evidently an out of date version of <stdarg.h>; can't validate
11753 va_start's second argument, but can still work as intended. */
11754 warning (0, "%<__builtin_next_arg%> called without an argument");
11755 return true;
11757 else if (nargs > 1)
11759 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11760 return true;
11762 arg = CALL_EXPR_ARG (exp, 0);
11765 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11766 or __builtin_next_arg (0) the first time we see it, after checking
11767 the arguments and if needed issuing a warning. */
11768 if (!integer_zerop (arg))
11770 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11772 /* Strip off all nops for the sake of the comparison. This
11773 is not quite the same as STRIP_NOPS. It does more.
11774 We must also strip off INDIRECT_EXPR for C++ reference
11775 parameters. */
11776 while (CONVERT_EXPR_P (arg)
11777 || TREE_CODE (arg) == INDIRECT_REF)
11778 arg = TREE_OPERAND (arg, 0);
11779 if (arg != last_parm)
11781 /* FIXME: Sometimes with the tree optimizers we can get something that
11782 is not the last argument even though the user used the last
11783 argument. We just warn and set the arg to be the last
11784 argument so that we will get wrong-code because of
11785 it. */
11786 warning (0, "second parameter of %<va_start%> not last named argument");
11789 /* Undefined by C99 7.15.1.4p4 (va_start):
11790 "If the parameter parmN is declared with the register storage
11791 class, with a function or array type, or with a type that is
11792 not compatible with the type that results after application of
11793 the default argument promotions, the behavior is undefined."
11795 else if (DECL_REGISTER (arg))
11796 warning (0, "undefined behaviour when second parameter of "
11797 "%<va_start%> is declared with %<register%> storage");
11799 /* We want to verify the second parameter just once before the tree
11800 optimizers are run and then avoid keeping it in the tree,
11801 as otherwise we could warn even for correct code like:
11802 void foo (int i, ...)
11803 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11804 if (va_start_p)
11805 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11806 else
11807 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11809 return false;
11813 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11814 ORIG may be null if this is a 2-argument call. We don't attempt to
11815 simplify calls with more than 3 arguments.
11817 Return NULL_TREE if no simplification was possible, otherwise return the
11818 simplified form of the call as a tree. If IGNORED is true, it means that
11819 the caller does not use the returned value of the function. */
11821 static tree
11822 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11824 tree call, retval;
11825 const char *fmt_str = NULL;
11827 /* Verify the required arguments in the original call. We deal with two
11828 types of sprintf() calls: 'sprintf (str, fmt)' and
11829 'sprintf (dest, "%s", orig)'. */
11830 if (!validate_arg (dest, POINTER_TYPE)
11831 || !validate_arg (fmt, POINTER_TYPE))
11832 return NULL_TREE;
11833 if (orig && !validate_arg (orig, POINTER_TYPE))
11834 return NULL_TREE;
11836 /* Check whether the format is a literal string constant. */
11837 fmt_str = c_getstr (fmt);
11838 if (fmt_str == NULL)
11839 return NULL_TREE;
11841 call = NULL_TREE;
11842 retval = NULL_TREE;
11844 if (!init_target_chars ())
11845 return NULL_TREE;
11847 /* If the format doesn't contain % args or %%, use strcpy. */
11848 if (strchr (fmt_str, target_percent) == NULL)
11850 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11852 if (!fn)
11853 return NULL_TREE;
11855 /* Don't optimize sprintf (buf, "abc", ptr++). */
11856 if (orig)
11857 return NULL_TREE;
11859 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11860 'format' is known to contain no % formats. */
11861 call = build_call_expr (fn, 2, dest, fmt);
11862 if (!ignored)
11863 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11866 /* If the format is "%s", use strcpy if the result isn't used. */
11867 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11869 tree fn;
11870 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11872 if (!fn)
11873 return NULL_TREE;
11875 /* Don't crash on sprintf (str1, "%s"). */
11876 if (!orig)
11877 return NULL_TREE;
11879 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11880 if (!ignored)
11882 retval = c_strlen (orig, 1);
11883 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11884 return NULL_TREE;
11886 call = build_call_expr (fn, 2, dest, orig);
11889 if (call && retval)
11891 retval = fold_convert
11892 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11893 retval);
11894 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11896 else
11897 return call;
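/* Illustrative folds performed above (assuming the strcpy builtin decl is
   available):

     sprintf (buf, "hello")   ->  strcpy (buf, "hello"), value 5 if used
     sprintf (buf, "%s", str) ->  strcpy (buf, str); if the result is used
                                  this is only done when strlen (str) folds
                                  to a constant

   Formats containing any other % directives are not touched here.  */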
11900 /* Expand a call EXP to __builtin_object_size. */
11903 expand_builtin_object_size (tree exp)
11905 tree ost;
11906 int object_size_type;
11907 tree fndecl = get_callee_fndecl (exp);
11909 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11911 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11912 exp, fndecl);
11913 expand_builtin_trap ();
11914 return const0_rtx;
11917 ost = CALL_EXPR_ARG (exp, 1);
11918 STRIP_NOPS (ost);
11920 if (TREE_CODE (ost) != INTEGER_CST
11921 || tree_int_cst_sgn (ost) < 0
11922 || compare_tree_int (ost, 3) > 0)
11924 error ("%Klast argument of %D is not integer constant between 0 and 3",
11925 exp, fndecl);
11926 expand_builtin_trap ();
11927 return const0_rtx;
11930 object_size_type = tree_low_cst (ost, 0);
11932 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11935 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11936 FCODE is the BUILT_IN_* to use.
11937 Return NULL_RTX if we failed; the caller should emit a normal call,
11938 otherwise try to get the result in TARGET, if convenient (and in
11939 mode MODE if that's convenient). */
11941 static rtx
11942 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11943 enum built_in_function fcode)
11945 tree dest, src, len, size;
11947 if (!validate_arglist (exp,
11948 POINTER_TYPE,
11949 fcode == BUILT_IN_MEMSET_CHK
11950 ? INTEGER_TYPE : POINTER_TYPE,
11951 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11952 return NULL_RTX;
11954 dest = CALL_EXPR_ARG (exp, 0);
11955 src = CALL_EXPR_ARG (exp, 1);
11956 len = CALL_EXPR_ARG (exp, 2);
11957 size = CALL_EXPR_ARG (exp, 3);
11959 if (! host_integerp (size, 1))
11960 return NULL_RTX;
11962 if (host_integerp (len, 1) || integer_all_onesp (size))
11964 tree fn;
11966 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11968 warning_at (tree_nonartificial_location (exp),
11969 0, "%Kcall to %D will always overflow destination buffer",
11970 exp, get_callee_fndecl (exp));
11971 return NULL_RTX;
11974 fn = NULL_TREE;
11975 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11976 mem{cpy,pcpy,move,set} is available. */
11977 switch (fcode)
11979 case BUILT_IN_MEMCPY_CHK:
11980 fn = built_in_decls[BUILT_IN_MEMCPY];
11981 break;
11982 case BUILT_IN_MEMPCPY_CHK:
11983 fn = built_in_decls[BUILT_IN_MEMPCPY];
11984 break;
11985 case BUILT_IN_MEMMOVE_CHK:
11986 fn = built_in_decls[BUILT_IN_MEMMOVE];
11987 break;
11988 case BUILT_IN_MEMSET_CHK:
11989 fn = built_in_decls[BUILT_IN_MEMSET];
11990 break;
11991 default:
11992 break;
11995 if (! fn)
11996 return NULL_RTX;
11998 fn = build_call_expr (fn, 3, dest, src, len);
11999 STRIP_TYPE_NOPS (fn);
12000 while (TREE_CODE (fn) == COMPOUND_EXPR)
12002 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12003 EXPAND_NORMAL);
12004 fn = TREE_OPERAND (fn, 1);
12006 if (TREE_CODE (fn) == CALL_EXPR)
12007 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12008 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12010 else if (fcode == BUILT_IN_MEMSET_CHK)
12011 return NULL_RTX;
12012 else
12014 unsigned int dest_align
12015 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12017 /* If DEST is not a pointer type, call the normal function. */
12018 if (dest_align == 0)
12019 return NULL_RTX;
12021 /* If SRC and DEST are the same (and not volatile), do nothing. */
12022 if (operand_equal_p (src, dest, 0))
12024 tree expr;
12026 if (fcode != BUILT_IN_MEMPCPY_CHK)
12028 /* Evaluate and ignore LEN in case it has side-effects. */
12029 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12030 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12033 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12034 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12037 /* __memmove_chk special case. */
12038 if (fcode == BUILT_IN_MEMMOVE_CHK)
12040 unsigned int src_align
12041 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12043 if (src_align == 0)
12044 return NULL_RTX;
12046 /* If src is categorized for a readonly section we can use
12047 normal __memcpy_chk. */
12048 if (readonly_data_expr (src))
12050 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12051 if (!fn)
12052 return NULL_RTX;
12053 fn = build_call_expr (fn, 4, dest, src, len, size);
12054 STRIP_TYPE_NOPS (fn);
12055 while (TREE_CODE (fn) == COMPOUND_EXPR)
12057 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12058 EXPAND_NORMAL);
12059 fn = TREE_OPERAND (fn, 1);
12061 if (TREE_CODE (fn) == CALL_EXPR)
12062 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12063 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12066 return NULL_RTX;
12070 /* Emit warning if a buffer overflow is detected at compile time. */
12072 static void
12073 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12075 int is_strlen = 0;
12076 tree len, size;
12077 location_t loc = tree_nonartificial_location (exp);
12079 switch (fcode)
12081 case BUILT_IN_STRCPY_CHK:
12082 case BUILT_IN_STPCPY_CHK:
12083 /* For __strcat_chk the warning will be emitted only if overflowing
12084 by at least strlen (dest) + 1 bytes. */
12085 case BUILT_IN_STRCAT_CHK:
12086 len = CALL_EXPR_ARG (exp, 1);
12087 size = CALL_EXPR_ARG (exp, 2);
12088 is_strlen = 1;
12089 break;
12090 case BUILT_IN_STRNCAT_CHK:
12091 case BUILT_IN_STRNCPY_CHK:
12092 len = CALL_EXPR_ARG (exp, 2);
12093 size = CALL_EXPR_ARG (exp, 3);
12094 break;
12095 case BUILT_IN_SNPRINTF_CHK:
12096 case BUILT_IN_VSNPRINTF_CHK:
12097 len = CALL_EXPR_ARG (exp, 1);
12098 size = CALL_EXPR_ARG (exp, 3);
12099 break;
12100 default:
12101 gcc_unreachable ();
12104 if (!len || !size)
12105 return;
12107 if (! host_integerp (size, 1) || integer_all_onesp (size))
12108 return;
12110 if (is_strlen)
12112 len = c_strlen (len, 1);
12113 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12114 return;
12116 else if (fcode == BUILT_IN_STRNCAT_CHK)
12118 tree src = CALL_EXPR_ARG (exp, 1);
12119 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12120 return;
12121 src = c_strlen (src, 1);
12122 if (! src || ! host_integerp (src, 1))
12124 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12125 exp, get_callee_fndecl (exp));
12126 return;
12128 else if (tree_int_cst_lt (src, size))
12129 return;
12131 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12132 return;
12134 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12135 exp, get_callee_fndecl (exp));
12138 /* Emit warning if a buffer overflow is detected at compile time
12139 in __sprintf_chk/__vsprintf_chk calls. */
12141 static void
12142 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12144 tree dest, size, len, fmt, flag;
12145 const char *fmt_str;
12146 int nargs = call_expr_nargs (exp);
12148 /* Verify the required arguments in the original call. */
12150 if (nargs < 4)
12151 return;
12152 dest = CALL_EXPR_ARG (exp, 0);
12153 flag = CALL_EXPR_ARG (exp, 1);
12154 size = CALL_EXPR_ARG (exp, 2);
12155 fmt = CALL_EXPR_ARG (exp, 3);
12157 if (! host_integerp (size, 1) || integer_all_onesp (size))
12158 return;
12160 /* Check whether the format is a literal string constant. */
12161 fmt_str = c_getstr (fmt);
12162 if (fmt_str == NULL)
12163 return;
12165 if (!init_target_chars ())
12166 return;
12168 /* If the format doesn't contain % args or %%, we know its size. */
12169 if (strchr (fmt_str, target_percent) == 0)
12170 len = build_int_cstu (size_type_node, strlen (fmt_str));
12171 /* If the format is "%s" and first ... argument is a string literal,
12172 we know it too. */
12173 else if (fcode == BUILT_IN_SPRINTF_CHK
12174 && strcmp (fmt_str, target_percent_s) == 0)
12176 tree arg;
12178 if (nargs < 5)
12179 return;
12180 arg = CALL_EXPR_ARG (exp, 4);
12181 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12182 return;
12184 len = c_strlen (arg, 1);
12185 if (!len || ! host_integerp (len, 1))
12186 return;
12188 else
12189 return;
12191 if (! tree_int_cst_lt (len, size))
12192 warning_at (tree_nonartificial_location (exp),
12193 0, "%Kcall to %D will always overflow destination buffer",
12194 exp, get_callee_fndecl (exp));
12197 /* Emit a warning if free is called with the address of a variable. */
12199 static void
12200 maybe_emit_free_warning (tree exp)
12202 tree arg = CALL_EXPR_ARG (exp, 0);
12204 STRIP_NOPS (arg);
12205 if (TREE_CODE (arg) != ADDR_EXPR)
12206 return;
12208 arg = get_base_address (TREE_OPERAND (arg, 0));
12209 if (arg == NULL || INDIRECT_REF_P (arg))
12210 return;
12212 if (SSA_VAR_P (arg))
12213 warning_at (tree_nonartificial_location (exp),
12214 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12215 else
12216 warning_at (tree_nonartificial_location (exp),
12217 0, "%Kattempt to free a non-heap object", exp);
12220 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12221 if possible. */
12223 tree
12224 fold_builtin_object_size (tree ptr, tree ost)
12226 tree ret = NULL_TREE;
12227 int object_size_type;
12229 if (!validate_arg (ptr, POINTER_TYPE)
12230 || !validate_arg (ost, INTEGER_TYPE))
12231 return NULL_TREE;
12233 STRIP_NOPS (ost);
12235 if (TREE_CODE (ost) != INTEGER_CST
12236 || tree_int_cst_sgn (ost) < 0
12237 || compare_tree_int (ost, 3) > 0)
12238 return NULL_TREE;
12240 object_size_type = tree_low_cst (ost, 0);
12242 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12243 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12244 and (size_t) 0 for types 2 and 3. */
12245 if (TREE_SIDE_EFFECTS (ptr))
12246 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12248 if (TREE_CODE (ptr) == ADDR_EXPR)
12249 ret = build_int_cstu (size_type_node,
12250 compute_builtin_object_size (ptr, object_size_type));
12252 else if (TREE_CODE (ptr) == SSA_NAME)
12254 unsigned HOST_WIDE_INT bytes;
12256 /* If object size is not known yet, delay folding until
12257 later. Maybe subsequent passes will help determine
12258 it. */
12259 bytes = compute_builtin_object_size (ptr, object_size_type);
12260 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12261 ? -1 : 0))
12262 ret = build_int_cstu (size_type_node, bytes);
12265 if (ret)
12267 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12268 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12269 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12270 ret = NULL_TREE;
12273 return ret;
12276 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12277 DEST, SRC, LEN, and SIZE are the arguments to the call.
12278 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12279 code of the builtin. If MAXLEN is not NULL, it is maximum length
12280 passed as third argument. */
12282 tree
12283 fold_builtin_memory_chk (tree fndecl,
12284 tree dest, tree src, tree len, tree size,
12285 tree maxlen, bool ignore,
12286 enum built_in_function fcode)
12288 tree fn;
12290 if (!validate_arg (dest, POINTER_TYPE)
12291 || !validate_arg (src,
12292 (fcode == BUILT_IN_MEMSET_CHK
12293 ? INTEGER_TYPE : POINTER_TYPE))
12294 || !validate_arg (len, INTEGER_TYPE)
12295 || !validate_arg (size, INTEGER_TYPE))
12296 return NULL_TREE;
12298 /* If SRC and DEST are the same (and not volatile), return DEST
12299 (resp. DEST+LEN for __mempcpy_chk). */
12300 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12302 if (fcode != BUILT_IN_MEMPCPY_CHK)
12303 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12304 else
12306 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12307 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
12311 if (! host_integerp (size, 1))
12312 return NULL_TREE;
12314 if (! integer_all_onesp (size))
12316 if (! host_integerp (len, 1))
12318 /* If LEN is not constant, try MAXLEN too.
12319 For MAXLEN only allow optimizing into non-_ocs function
12320 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12321 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12323 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12325 /* (void) __mempcpy_chk () can be optimized into
12326 (void) __memcpy_chk (). */
12327 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12328 if (!fn)
12329 return NULL_TREE;
12331 return build_call_expr (fn, 4, dest, src, len, size);
12333 return NULL_TREE;
12336 else
12337 maxlen = len;
12339 if (tree_int_cst_lt (size, maxlen))
12340 return NULL_TREE;
12343 fn = NULL_TREE;
12344 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12345 mem{cpy,pcpy,move,set} is available. */
12346 switch (fcode)
12348 case BUILT_IN_MEMCPY_CHK:
12349 fn = built_in_decls[BUILT_IN_MEMCPY];
12350 break;
12351 case BUILT_IN_MEMPCPY_CHK:
12352 fn = built_in_decls[BUILT_IN_MEMPCPY];
12353 break;
12354 case BUILT_IN_MEMMOVE_CHK:
12355 fn = built_in_decls[BUILT_IN_MEMMOVE];
12356 break;
12357 case BUILT_IN_MEMSET_CHK:
12358 fn = built_in_decls[BUILT_IN_MEMSET];
12359 break;
12360 default:
12361 break;
12364 if (!fn)
12365 return NULL_TREE;
12367 return build_call_expr (fn, 3, dest, src, len);
12370 /* Fold a call to the __st[rp]cpy_chk builtin.
12371 DEST, SRC, and SIZE are the arguments to the call.
12372 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12373 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12374 strings passed as second argument. */
12376 tree
12377 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12378 tree maxlen, bool ignore,
12379 enum built_in_function fcode)
12381 tree len, fn;
12383 if (!validate_arg (dest, POINTER_TYPE)
12384 || !validate_arg (src, POINTER_TYPE)
12385 || !validate_arg (size, INTEGER_TYPE))
12386 return NULL_TREE;
12388 /* If SRC and DEST are the same (and not volatile), return DEST. */
12389 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12390 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12392 if (! host_integerp (size, 1))
12393 return NULL_TREE;
12395 if (! integer_all_onesp (size))
12397 len = c_strlen (src, 1);
12398 if (! len || ! host_integerp (len, 1))
12400 /* If LEN is not constant, try MAXLEN too.
12401 For MAXLEN only allow optimizing into non-_ocs function
12402 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12403 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12405 if (fcode == BUILT_IN_STPCPY_CHK)
12407 if (! ignore)
12408 return NULL_TREE;
12410 /* If return value of __stpcpy_chk is ignored,
12411 optimize into __strcpy_chk. */
12412 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12413 if (!fn)
12414 return NULL_TREE;
12416 return build_call_expr (fn, 3, dest, src, size);
12419 if (! len || TREE_SIDE_EFFECTS (len))
12420 return NULL_TREE;
12422 /* If c_strlen returned something, but not a constant,
12423 transform __strcpy_chk into __memcpy_chk. */
12424 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12425 if (!fn)
12426 return NULL_TREE;
12428 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12429 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12430 build_call_expr (fn, 4,
12431 dest, src, len, size));
12434 else
12435 maxlen = len;
12437 if (! tree_int_cst_lt (maxlen, size))
12438 return NULL_TREE;
12441 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12442 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12443 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12444 if (!fn)
12445 return NULL_TREE;
12447 return build_call_expr (fn, 2, dest, src);
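/* Illustrative folds performed above (SIZE is the object size supplied by
   the checking instrumentation):

     __strcpy_chk (d, "abc", 16)       ->  strcpy (d, "abc")   [3 < 16]
     __strcpy_chk (d, s, (size_t) -1)  ->  strcpy (d, s)       [size unknown]

   When only a symbolic, side-effect-free length is known, the call is
   rewritten to __memcpy_chk with length strlen + 1; otherwise the checking
   call is kept.  */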
12450 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12451 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12452 length passed as third argument. */
12454 tree
12455 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12456 tree maxlen)
12458 tree fn;
12460 if (!validate_arg (dest, POINTER_TYPE)
12461 || !validate_arg (src, POINTER_TYPE)
12462 || !validate_arg (len, INTEGER_TYPE)
12463 || !validate_arg (size, INTEGER_TYPE))
12464 return NULL_TREE;
12466 if (! host_integerp (size, 1))
12467 return NULL_TREE;
12469 if (! integer_all_onesp (size))
12471 if (! host_integerp (len, 1))
12473 /* If LEN is not constant, try MAXLEN too.
12474 For MAXLEN only allow optimizing into non-_ocs function
12475 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12476 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12477 return NULL_TREE;
12479 else
12480 maxlen = len;
12482 if (tree_int_cst_lt (size, maxlen))
12483 return NULL_TREE;
12486 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12487 fn = built_in_decls[BUILT_IN_STRNCPY];
12488 if (!fn)
12489 return NULL_TREE;
12491 return build_call_expr (fn, 3, dest, src, len);
12494 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12495 are the arguments to the call. */
12497 static tree
12498 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12500 tree fn;
12501 const char *p;
12503 if (!validate_arg (dest, POINTER_TYPE)
12504 || !validate_arg (src, POINTER_TYPE)
12505 || !validate_arg (size, INTEGER_TYPE))
12506 return NULL_TREE;
12508 p = c_getstr (src);
12509 /* If the SRC parameter is "", return DEST. */
12510 if (p && *p == '\0')
12511 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12513 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12514 return NULL_TREE;
12516 /* If __builtin_strcat_chk is used, assume strcat is available. */
12517 fn = built_in_decls[BUILT_IN_STRCAT];
12518 if (!fn)
12519 return NULL_TREE;
12521 return build_call_expr (fn, 2, dest, src);
12524 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12525 LEN, and SIZE. */
12527 static tree
12528 fold_builtin_strncat_chk (tree fndecl,
12529 tree dest, tree src, tree len, tree size)
12531 tree fn;
12532 const char *p;
12534 if (!validate_arg (dest, POINTER_TYPE)
12535 || !validate_arg (src, POINTER_TYPE)
12536 || !validate_arg (len, INTEGER_TYPE)
12537 || !validate_arg (size, INTEGER_TYPE))
12538 return NULL_TREE;
12540 p = c_getstr (src);
12541 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12542 if (p && *p == '\0')
12543 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12544 else if (integer_zerop (len))
12545 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12547 if (! host_integerp (size, 1))
12548 return NULL_TREE;
12550 if (! integer_all_onesp (size))
12552 tree src_len = c_strlen (src, 1);
12553 if (src_len
12554 && host_integerp (src_len, 1)
12555 && host_integerp (len, 1)
12556 && ! tree_int_cst_lt (len, src_len))
12558 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12559 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12560 if (!fn)
12561 return NULL_TREE;
12563 return build_call_expr (fn, 3, dest, src, size);
12565 return NULL_TREE;
12568 /* If __builtin_strncat_chk is used, assume strncat is available. */
12569 fn = built_in_decls[BUILT_IN_STRNCAT];
12570 if (!fn)
12571 return NULL_TREE;
12573 return build_call_expr (fn, 3, dest, src, len);
12576 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12577 a normal call should be emitted rather than expanding the function
12578 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12580 static tree
12581 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12583 tree dest, size, len, fn, fmt, flag;
12584 const char *fmt_str;
12585 int nargs = call_expr_nargs (exp);
12587 /* Verify the required arguments in the original call. */
12588 if (nargs < 4)
12589 return NULL_TREE;
12590 dest = CALL_EXPR_ARG (exp, 0);
12591 if (!validate_arg (dest, POINTER_TYPE))
12592 return NULL_TREE;
12593 flag = CALL_EXPR_ARG (exp, 1);
12594 if (!validate_arg (flag, INTEGER_TYPE))
12595 return NULL_TREE;
12596 size = CALL_EXPR_ARG (exp, 2);
12597 if (!validate_arg (size, INTEGER_TYPE))
12598 return NULL_TREE;
12599 fmt = CALL_EXPR_ARG (exp, 3);
12600 if (!validate_arg (fmt, POINTER_TYPE))
12601 return NULL_TREE;
12603 if (! host_integerp (size, 1))
12604 return NULL_TREE;
12606 len = NULL_TREE;
12608 if (!init_target_chars ())
12609 return NULL_TREE;
12611 /* Check whether the format is a literal string constant. */
12612 fmt_str = c_getstr (fmt);
12613 if (fmt_str != NULL)
12615 /* If the format doesn't contain % args or %%, we know the size. */
12616 if (strchr (fmt_str, target_percent) == 0)
12618 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12619 len = build_int_cstu (size_type_node, strlen (fmt_str));
12621 /* If the format is "%s" and first ... argument is a string literal,
12622 we know the size too. */
12623 else if (fcode == BUILT_IN_SPRINTF_CHK
12624 && strcmp (fmt_str, target_percent_s) == 0)
12626 tree arg;
12628 if (nargs == 5)
12630 arg = CALL_EXPR_ARG (exp, 4);
12631 if (validate_arg (arg, POINTER_TYPE))
12633 len = c_strlen (arg, 1);
12634 if (! len || ! host_integerp (len, 1))
12635 len = NULL_TREE;
12641 if (! integer_all_onesp (size))
12643 if (! len || ! tree_int_cst_lt (len, size))
12644 return NULL_TREE;
12647 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12648 or if format doesn't contain % chars or is "%s". */
12649 if (! integer_zerop (flag))
12651 if (fmt_str == NULL)
12652 return NULL_TREE;
12653 if (strchr (fmt_str, target_percent) != NULL
12654 && strcmp (fmt_str, target_percent_s))
12655 return NULL_TREE;
12658 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12659 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12660 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12661 if (!fn)
12662 return NULL_TREE;
12664 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12667 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
12668 a normal call should be emitted rather than expanding the function
12669 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12670 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12671 passed as second argument. */
12673 tree
12674 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12675 enum built_in_function fcode)
12677 tree dest, size, len, fn, fmt, flag;
12678 const char *fmt_str;
12680 /* Verify the required arguments in the original call. */
12681 if (call_expr_nargs (exp) < 5)
12682 return NULL_TREE;
12683 dest = CALL_EXPR_ARG (exp, 0);
12684 if (!validate_arg (dest, POINTER_TYPE))
12685 return NULL_TREE;
12686 len = CALL_EXPR_ARG (exp, 1);
12687 if (!validate_arg (len, INTEGER_TYPE))
12688 return NULL_TREE;
12689 flag = CALL_EXPR_ARG (exp, 2);
12690 if (!validate_arg (flag, INTEGER_TYPE))
12691 return NULL_TREE;
12692 size = CALL_EXPR_ARG (exp, 3);
12693 if (!validate_arg (size, INTEGER_TYPE))
12694 return NULL_TREE;
12695 fmt = CALL_EXPR_ARG (exp, 4);
12696 if (!validate_arg (fmt, POINTER_TYPE))
12697 return NULL_TREE;
12699 if (! host_integerp (size, 1))
12700 return NULL_TREE;
12702 if (! integer_all_onesp (size))
12704 if (! host_integerp (len, 1))
12706 /* If LEN is not constant, try MAXLEN too.
12707 For MAXLEN only allow optimizing into non-_ocs function
12708 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12709 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12710 return NULL_TREE;
12712 else
12713 maxlen = len;
12715 if (tree_int_cst_lt (size, maxlen))
12716 return NULL_TREE;
12719 if (!init_target_chars ())
12720 return NULL_TREE;
12722 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12723 or if format doesn't contain % chars or is "%s". */
12724 if (! integer_zerop (flag))
12726 fmt_str = c_getstr (fmt);
12727 if (fmt_str == NULL)
12728 return NULL_TREE;
12729 if (strchr (fmt_str, target_percent) != NULL
12730 && strcmp (fmt_str, target_percent_s))
12731 return NULL_TREE;
12734 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12735 available. */
12736 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12737 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12738 if (!fn)
12739 return NULL_TREE;
12741 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12744 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12745 FMT and ARG are the arguments to the call; we don't fold cases with
12746 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12748 Return NULL_TREE if no simplification was possible, otherwise return the
12749 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12750 code of the function to be simplified. */
12752 static tree
12753 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12754 enum built_in_function fcode)
12756 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12757 const char *fmt_str = NULL;
12759 /* If the return value is used, don't do the transformation. */
12760 if (! ignore)
12761 return NULL_TREE;
12763 /* Verify the required arguments in the original call. */
12764 if (!validate_arg (fmt, POINTER_TYPE))
12765 return NULL_TREE;
12767 /* Check whether the format is a literal string constant. */
12768 fmt_str = c_getstr (fmt);
12769 if (fmt_str == NULL)
12770 return NULL_TREE;
12772 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12774 /* If we're using an unlocked function, assume the other
12775 unlocked functions exist explicitly. */
12776 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12777 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12779 else
12781 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12782 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12785 if (!init_target_chars ())
12786 return NULL_TREE;
12788 if (strcmp (fmt_str, target_percent_s) == 0
12789 || strchr (fmt_str, target_percent) == NULL)
12791 const char *str;
12793 if (strcmp (fmt_str, target_percent_s) == 0)
12795 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12796 return NULL_TREE;
12798 if (!arg || !validate_arg (arg, POINTER_TYPE))
12799 return NULL_TREE;
12801 str = c_getstr (arg);
12802 if (str == NULL)
12803 return NULL_TREE;
12805 else
12807 /* The format specifier doesn't contain any '%' characters. */
12808 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12809 && arg)
12810 return NULL_TREE;
12811 str = fmt_str;
12814 /* If the string was "", printf does nothing. */
12815 if (str[0] == '\0')
12816 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12818 /* If the string has length of 1, call putchar. */
12819 if (str[1] == '\0')
12821 /* Given printf("c") (where c is any one character),
12822 convert "c"[0] to an int and pass that to the replacement
12823 function. */
12824 newarg = build_int_cst (NULL_TREE, str[0]);
12825 if (fn_putchar)
12826 call = build_call_expr (fn_putchar, 1, newarg);
12828 else
12830 /* If the string was "string\n", call puts("string"). */
12831 size_t len = strlen (str);
12832 if ((unsigned char)str[len - 1] == target_newline)
12834 /* Create a NUL-terminated string that's one char shorter
12835 than the original, stripping off the trailing '\n'. */
12836 char *newstr = XALLOCAVEC (char, len);
12837 memcpy (newstr, str, len - 1);
12838 newstr[len - 1] = 0;
12840 newarg = build_string_literal (len, newstr);
12841 if (fn_puts)
12842 call = build_call_expr (fn_puts, 1, newarg);
12844 else
12845 /* We'd like to arrange to call fputs(string,stdout) here,
12846 but we need stdout and don't have a way to get it yet. */
12847 return NULL_TREE;
12851 /* The other optimizations can be done only on the non-va_list variants. */
12852 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12853 return NULL_TREE;
12855 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12856 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12858 if (!arg || !validate_arg (arg, POINTER_TYPE))
12859 return NULL_TREE;
12860 if (fn_puts)
12861 call = build_call_expr (fn_puts, 1, arg);
12864 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12865 else if (strcmp (fmt_str, target_percent_c) == 0)
12867 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12868 return NULL_TREE;
12869 if (fn_putchar)
12870 call = build_call_expr (fn_putchar, 1, arg);
12873 if (!call)
12874 return NULL_TREE;
12876 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
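/* Illustrative folds performed above when the return value is ignored
   (assuming the putchar/puts builtin decls are available):

     printf ("")         ->  call deleted, result 0
     printf ("a")        ->  putchar ('a')
     printf ("hello\n")  ->  puts ("hello")
     printf ("%s\n", s)  ->  puts (s)
     printf ("%c", c)    ->  putchar (c)

   Other formats are left as real calls; the va_list variants are only
   simplified when the format contains no '%' at all.  */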
12879 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12880 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12881 more than 3 arguments, and ARG may be null in the 2-argument case.
12883 Return NULL_TREE if no simplification was possible, otherwise return the
12884 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12885 code of the function to be simplified. */
12887 static tree
12888 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12889 enum built_in_function fcode)
12891 tree fn_fputc, fn_fputs, call = NULL_TREE;
12892 const char *fmt_str = NULL;
12894 /* If the return value is used, don't do the transformation. */
12895 if (! ignore)
12896 return NULL_TREE;
12898 /* Verify the required arguments in the original call. */
12899 if (!validate_arg (fp, POINTER_TYPE))
12900 return NULL_TREE;
12901 if (!validate_arg (fmt, POINTER_TYPE))
12902 return NULL_TREE;
12904 /* Check whether the format is a literal string constant. */
12905 fmt_str = c_getstr (fmt);
12906 if (fmt_str == NULL)
12907 return NULL_TREE;
12909 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12911 /* If we're using an unlocked function, assume the other
12912 unlocked functions exist explicitly. */
12913 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12914 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12916 else
12918 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12919 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12922 if (!init_target_chars ())
12923 return NULL_TREE;
12925 /* If the format doesn't contain % args or %%, fold to fputs. */
12926 if (strchr (fmt_str, target_percent) == NULL)
12928 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12929 && arg)
12930 return NULL_TREE;
12932 /* If the format specifier was "", fprintf does nothing. */
12933 if (fmt_str[0] == '\0')
12935 /* If FP has side-effects, just wait until gimplification is
12936 done. */
12937 if (TREE_SIDE_EFFECTS (fp))
12938 return NULL_TREE;
12940 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12943 /* When "string" doesn't contain %, replace all cases of
12944 fprintf (fp, string) with fputs (string, fp). The fputs
12945 builtin will take care of special cases like length == 1. */
12946 if (fn_fputs)
12947 call = build_call_expr (fn_fputs, 2, fmt, fp);
12950 /* The other optimizations can be done only on the non-va_list variants. */
12951 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12952 return NULL_TREE;
12954 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12955 else if (strcmp (fmt_str, target_percent_s) == 0)
12957 if (!arg || !validate_arg (arg, POINTER_TYPE))
12958 return NULL_TREE;
12959 if (fn_fputs)
12960 call = build_call_expr (fn_fputs, 2, arg, fp);
12963 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12964 else if (strcmp (fmt_str, target_percent_c) == 0)
12966 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12967 return NULL_TREE;
12968 if (fn_fputc)
12969 call = build_call_expr (fn_fputc, 2, arg, fp);
12972 if (!call)
12973 return NULL_TREE;
12974 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
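/* A parallel sketch for the fprintf folding above, which only fires when
   the call's return value is ignored (fputs/fputc assumed available):

       fprintf (fp, "")         =>  the constant 0 (only if FP has no
                                    side effects)
       fprintf (fp, "string")   =>  fputs ("string", fp)
       fprintf (fp, "%s", arg)  =>  fputs (arg, fp)
       fprintf (fp, "%c", arg)  =>  fputc (arg, fp)

   As above, the "%s" and "%c" forms are skipped for the va_list
   variants.  */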
12977 /* Initialize format string characters in the target charset. */
12979 static bool
12980 init_target_chars (void)
12982 static bool init;
12983 if (!init)
12985 target_newline = lang_hooks.to_target_charset ('\n');
12986 target_percent = lang_hooks.to_target_charset ('%');
12987 target_c = lang_hooks.to_target_charset ('c');
12988 target_s = lang_hooks.to_target_charset ('s');
12989 if (target_newline == 0 || target_percent == 0 || target_c == 0
12990 || target_s == 0)
12991 return false;
12993 target_percent_c[0] = target_percent;
12994 target_percent_c[1] = target_c;
12995 target_percent_c[2] = '\0';
12997 target_percent_s[0] = target_percent;
12998 target_percent_s[1] = target_s;
12999 target_percent_s[2] = '\0';
13001 target_percent_s_newline[0] = target_percent;
13002 target_percent_s_newline[1] = target_s;
13003 target_percent_s_newline[2] = target_newline;
13004 target_percent_s_newline[3] = '\0';
13006 init = true;
13008 return true;
13011 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13012 and no overflow/underflow occurred. INEXACT is true if M was not
13013 exactly calculated. TYPE is the tree type for the result. This
13014 function assumes that the caller cleared the MPFR flags and then
13015 calculated M, so any flag set by the time this function is entered
13016 reflects that calculation. Return NULL_TREE if any checks fail. */
13018 static tree
13019 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13021 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13022 overflow/underflow occurred. If -frounding-math, proceed iff the
13023 result of calling FUNC was exact. */
13024 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13025 && (!flag_rounding_math || !inexact))
13027 REAL_VALUE_TYPE rr;
13029 real_from_mpfr (&rr, m, type, GMP_RNDN);
13030 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13031 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13032 but the mpfr_t is not, then we underflowed in the
13033 conversion. */
13034 if (real_isfinite (&rr)
13035 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13037 REAL_VALUE_TYPE rmode;
13039 real_convert (&rmode, TYPE_MODE (type), &rr);
13040 /* Proceed iff the specified mode can hold the value. */
13041 if (real_identical (&rmode, &rr))
13042 return build_real (type, rmode);
13045 return NULL_TREE;
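/* For example, if FUNC overflowed or underflowed in MPFR, produced a NaN
   or Inf, or computed a value that does not survive the round trip into
   the target format (the real_identical check), the caller receives
   NULL_TREE and the builtin call is simply left for the runtime library.  */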
13048 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13049 FUNC on it and return the resulting value as a tree with type TYPE.
13050 If MIN and/or MAX are not NULL, then the supplied ARG must be
13051 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13052 acceptable values, otherwise they are not. The mpfr precision is
13053 set to the precision of TYPE. We assume that function FUNC returns
13054 zero if the result could be calculated exactly within the requested
13055 precision. */
13057 static tree
13058 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13059 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13060 bool inclusive)
13062 tree result = NULL_TREE;
13064 STRIP_NOPS (arg);
13066 /* To proceed, MPFR must exactly represent the target floating point
13067 format, which only happens when the target base equals two. */
13068 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13069 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13071 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13073 if (real_isfinite (ra)
13074 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13075 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13077 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13078 const int prec = fmt->p;
13079 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13080 int inexact;
13081 mpfr_t m;
13083 mpfr_init2 (m, prec);
13084 mpfr_from_real (m, ra, GMP_RNDN);
13085 mpfr_clear_flags ();
13086 inexact = func (m, m, rnd);
13087 result = do_mpfr_ckconv (m, type, inexact);
13088 mpfr_clear (m);
13092 return result;
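/* A minimal user-level sketch of the same recipe written directly against
   the MPFR API (assuming <mpfr.h>, -lmpfr and IEEE double precision, 53
   bits); mpfr_set_d stands in for the mpfr_from_real call used above, and
   mpfr_sin returns 0 iff its result is exact:

       mpfr_t m;
       int inexact;
       mpfr_init2 (m, 53);
       mpfr_set_d (m, 1.0, GMP_RNDN);
       mpfr_clear_flags ();
       inexact = mpfr_sin (m, m, GMP_RNDN);
       ... do_mpfr_ckconv-style checks on m and inexact ...
       mpfr_clear (m);

   This is the path by which a call such as sin (1.0) with a constant
   argument can be folded to a REAL_CST at compile time.  */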
13095 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13096 FUNC on it and return the resulting value as a tree with type TYPE.
13097 The mpfr precision is set to the precision of TYPE. We assume that
13098 function FUNC returns zero if the result could be calculated
13099 exactly within the requested precision. */
13101 static tree
13102 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13103 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13105 tree result = NULL_TREE;
13107 STRIP_NOPS (arg1);
13108 STRIP_NOPS (arg2);
13110 /* To proceed, MPFR must exactly represent the target floating point
13111 format, which only happens when the target base equals two. */
13112 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13113 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13114 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13116 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13117 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13119 if (real_isfinite (ra1) && real_isfinite (ra2))
13121 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13122 const int prec = fmt->p;
13123 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13124 int inexact;
13125 mpfr_t m1, m2;
13127 mpfr_inits2 (prec, m1, m2, NULL);
13128 mpfr_from_real (m1, ra1, GMP_RNDN);
13129 mpfr_from_real (m2, ra2, GMP_RNDN);
13130 mpfr_clear_flags ();
13131 inexact = func (m1, m1, m2, rnd);
13132 result = do_mpfr_ckconv (m1, type, inexact);
13133 mpfr_clears (m1, m2, NULL);
13137 return result;
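/* Two-argument callers pass entry points such as mpfr_atan2 or mpfr_pow
   here, so e.g. atan2 (1.0, 1.0) with constant operands can fold to a
   REAL_CST holding pi/4.  */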
13140 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13141 FUNC on it and return the resulting value as a tree with type TYPE.
13142 The mpfr precision is set to the precision of TYPE. We assume that
13143 function FUNC returns zero if the result could be calculated
13144 exactly within the requested precision. */
13146 static tree
13147 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13148 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13150 tree result = NULL_TREE;
13152 STRIP_NOPS (arg1);
13153 STRIP_NOPS (arg2);
13154 STRIP_NOPS (arg3);
13156 /* To proceed, MPFR must exactly represent the target floating point
13157 format, which only happens when the target base equals two. */
13158 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13159 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13160 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13161 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13163 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13164 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13165 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13167 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13169 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13170 const int prec = fmt->p;
13171 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13172 int inexact;
13173 mpfr_t m1, m2, m3;
13175 mpfr_inits2 (prec, m1, m2, m3, NULL);
13176 mpfr_from_real (m1, ra1, GMP_RNDN);
13177 mpfr_from_real (m2, ra2, GMP_RNDN);
13178 mpfr_from_real (m3, ra3, GMP_RNDN);
13179 mpfr_clear_flags ();
13180 inexact = func (m1, m1, m2, m3, rnd);
13181 result = do_mpfr_ckconv (m1, type, inexact);
13182 mpfr_clears (m1, m2, m3, NULL);
13186 return result;
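/* A typical three-argument caller is the fma folding, which passes
   mpfr_fma so that fma (x, y, z) with constant operands can be evaluated
   at compile time.  */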
13189 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13190 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13191 If ARG_SINP and ARG_COSP are NULL then the result is returned
13192 as a complex value.
13193 The type is taken from the type of ARG and is used for setting the
13194 precision of the calculation and results. */
13196 static tree
13197 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13199 tree const type = TREE_TYPE (arg);
13200 tree result = NULL_TREE;
13202 STRIP_NOPS (arg);
13204 /* To proceed, MPFR must exactly represent the target floating point
13205 format, which only happens when the target base equals two. */
13206 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13207 && TREE_CODE (arg) == REAL_CST
13208 && !TREE_OVERFLOW (arg))
13210 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13212 if (real_isfinite (ra))
13214 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13215 const int prec = fmt->p;
13216 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13217 tree result_s, result_c;
13218 int inexact;
13219 mpfr_t m, ms, mc;
13221 mpfr_inits2 (prec, m, ms, mc, NULL);
13222 mpfr_from_real (m, ra, GMP_RNDN);
13223 mpfr_clear_flags ();
13224 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13225 result_s = do_mpfr_ckconv (ms, type, inexact);
13226 result_c = do_mpfr_ckconv (mc, type, inexact);
13227 mpfr_clears (m, ms, mc, NULL);
13228 if (result_s && result_c)
13230 /* If we are to return in a complex value, do so. */
13231 if (!arg_sinp && !arg_cosp)
13232 return build_complex (build_complex_type (type),
13233 result_c, result_s);
13235 /* Dereference the sin/cos pointer arguments. */
13236 arg_sinp = build_fold_indirect_ref (arg_sinp);
13237 arg_cosp = build_fold_indirect_ref (arg_cosp);
13238 /* Proceed if valid pointer types were passed in. */
13239 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13240 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13242 /* Set the values. */
13243 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13244 result_s);
13245 TREE_SIDE_EFFECTS (result_s) = 1;
13246 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13247 result_c);
13248 TREE_SIDE_EFFECTS (result_c) = 1;
13249 /* Combine the assignments into a compound expr. */
13250 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13251 result_s, result_c));
13256 return result;
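/* Both modes in a sketch: with pointer arguments, sincos (x, &s, &c) on a
   constant X becomes the two stores combined in the COMPOUND_EXPR built
   above; with both pointers NULL the result is returned as the complex
   constant cos (x) + i*sin (x) via build_complex.  */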
13259 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13260 two-argument mpfr order N Bessel function FUNC on them and return
13261 the resulting value as a tree with type TYPE. The mpfr precision
13262 is set to the precision of TYPE. We assume that function FUNC
13263 returns zero if the result could be calculated exactly within the
13264 requested precision. */
13265 static tree
13266 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13267 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13268 const REAL_VALUE_TYPE *min, bool inclusive)
13270 tree result = NULL_TREE;
13272 STRIP_NOPS (arg1);
13273 STRIP_NOPS (arg2);
13275 /* To proceed, MPFR must exactly represent the target floating point
13276 format, which only happens when the target base equals two. */
13277 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13278 && host_integerp (arg1, 0)
13279 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13281 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
13282 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13284 if (n == (long)n
13285 && real_isfinite (ra)
13286 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13288 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13289 const int prec = fmt->p;
13290 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13291 int inexact;
13292 mpfr_t m;
13294 mpfr_init2 (m, prec);
13295 mpfr_from_real (m, ra, GMP_RNDN);
13296 mpfr_clear_flags ();
13297 inexact = func (m, n, m, rnd);
13298 result = do_mpfr_ckconv (m, type, inexact);
13299 mpfr_clear (m);
13303 return result;
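/* This serves the jn/yn folders, whose MPFR entry points (mpfr_jn,
   mpfr_yn) take the order as a long; e.g. jn (2, 1.5) with constant
   arguments can become a REAL_CST, provided the order fits in a long and
   the real argument satisfies the MIN bound when one is supplied.  */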
13306 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13307 the pointer *(ARG_QUO) and return the result. The type is taken
13308 from the type of ARG0 and is used for setting the precision of the
13309 calculation and results. */
13311 static tree
13312 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13314 tree const type = TREE_TYPE (arg0);
13315 tree result = NULL_TREE;
13317 STRIP_NOPS (arg0);
13318 STRIP_NOPS (arg1);
13320 /* To proceed, MPFR must exactly represent the target floating point
13321 format, which only happens when the target base equals two. */
13322 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13323 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13324 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13326 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13327 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13329 if (real_isfinite (ra0) && real_isfinite (ra1))
13331 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13332 const int prec = fmt->p;
13333 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13334 tree result_rem;
13335 long integer_quo;
13336 mpfr_t m0, m1;
13338 mpfr_inits2 (prec, m0, m1, NULL);
13339 mpfr_from_real (m0, ra0, GMP_RNDN);
13340 mpfr_from_real (m1, ra1, GMP_RNDN);
13341 mpfr_clear_flags ();
13342 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13343 /* Remquo is independent of the rounding mode, so pass
13344 inexact=0 to do_mpfr_ckconv(). */
13345 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13346 mpfr_clears (m0, m1, NULL);
13347 if (result_rem)
13349 /* MPFR calculates quo in the host's long so it may
13350 return more bits in quo than the target int can hold
13351 if sizeof(host long) > sizeof(target int). This can
13352 happen even for native compilers in LP64 mode. In
13353 these cases, reduce the quo value modulo the largest
13354 number that the target int can hold while leaving one
13355 bit for the sign. */
13356 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13357 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13359 /* Dereference the quo pointer argument. */
13360 arg_quo = build_fold_indirect_ref (arg_quo);
13361 /* Proceed iff a valid pointer type was passed in. */
13362 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13364 /* Set the value. */
13365 tree result_quo = fold_build2 (MODIFY_EXPR,
13366 TREE_TYPE (arg_quo), arg_quo,
13367 build_int_cst (NULL, integer_quo));
13368 TREE_SIDE_EFFECTS (result_quo) = 1;
13369 /* Combine the quo assignment with the rem. */
13370 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13371 result_quo, result_rem));
13376 return result;
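/* For example, a constant call remquo (5.0, 3.0, &q) can fold so that the
   value of the expression is the remainder -1.0 and *q is assigned 2,
   with the store and the value combined in the COMPOUND_EXPR above (the
   quotient being reduced first when the host long is wider than the
   target int).  */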
13379 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13380 resulting value as a tree with type TYPE. The mpfr precision is
13381 set to the precision of TYPE. We assume that this mpfr function
13382 returns zero if the result could be calculated exactly within the
13383 requested precision. In addition, the integer pointer represented
13384 by ARG_SG will be dereferenced and set to the appropriate signgam
13385 (-1,1) value. */
13387 static tree
13388 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13390 tree result = NULL_TREE;
13392 STRIP_NOPS (arg);
13394 /* To proceed, MPFR must exactly represent the target floating point
13395 format, which only happens when the target base equals two. Also
13396 verify ARG is a constant and that ARG_SG is an int pointer. */
13397 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13398 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13399 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13400 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13402 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13404 /* In addition to NaN and Inf, the argument cannot be zero or a
13405 negative integer. */
13406 if (real_isfinite (ra)
13407 && ra->cl != rvc_zero
13408 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
13410 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13411 const int prec = fmt->p;
13412 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13413 int inexact, sg;
13414 mpfr_t m;
13415 tree result_lg;
13417 mpfr_init2 (m, prec);
13418 mpfr_from_real (m, ra, GMP_RNDN);
13419 mpfr_clear_flags ();
13420 inexact = mpfr_lgamma (m, &sg, m, rnd);
13421 result_lg = do_mpfr_ckconv (m, type, inexact);
13422 mpfr_clear (m);
13423 if (result_lg)
13425 tree result_sg;
13427 /* Dereference the arg_sg pointer argument. */
13428 arg_sg = build_fold_indirect_ref (arg_sg);
13429 /* Assign the signgam value into *arg_sg. */
13430 result_sg = fold_build2 (MODIFY_EXPR,
13431 TREE_TYPE (arg_sg), arg_sg,
13432 build_int_cst (NULL, sg));
13433 TREE_SIDE_EFFECTS (result_sg) = 1;
13434 /* Combine the signgam assignment with the lgamma result. */
13435 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13436 result_sg, result_lg));
13441 return result;
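/* For example, lgamma_r (0.5, &sg) can fold to the constant
   log (sqrt (pi)) while *sg is set to 1, again expressed as a
   COMPOUND_EXPR of the signgam store and the lgamma value.  */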
13444 /* FIXME tuples.
13445 The functions below provide an alternate interface for folding
13446 builtin function calls presented as GIMPLE_CALL statements rather
13447 than as CALL_EXPRs. The folded result is still expressed as a
13448 tree. There is too much code duplication in the handling of
13449 varargs functions, and a more intrusive re-factoring would permit
13450 better sharing of code between the tree and statement-based
13451 versions of these functions. */
13453 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13454 along with N new arguments specified as the "..." parameters. SKIP
13455 is the number of arguments in STMT to be omitted. This function is used
13456 to do varargs-to-varargs transformations. */
13458 static tree
13459 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13461 int oldnargs = gimple_call_num_args (stmt);
13462 int nargs = oldnargs - skip + n;
13463 tree fntype = TREE_TYPE (fndecl);
13464 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13465 tree *buffer;
13466 int i, j;
13467 va_list ap;
13469 buffer = XALLOCAVEC (tree, nargs);
13470 va_start (ap, n);
13471 for (i = 0; i < n; i++)
13472 buffer[i] = va_arg (ap, tree);
13473 va_end (ap);
13474 for (j = skip; j < oldnargs; j++, i++)
13475 buffer[i] = gimple_call_arg (stmt, j);
13477 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
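/* For instance, the __sprintf_chk folder below calls
   gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt): the first four
   arguments (dest, flag, size, fmt) of the original call are dropped,
   dest and fmt are supplied explicitly, and any remaining "..."
   arguments are appended unchanged.  */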
13480 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13481 a normal call should be emitted rather than expanding the function
13482 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13484 static tree
13485 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13487 tree dest, size, len, fn, fmt, flag;
13488 const char *fmt_str;
13489 int nargs = gimple_call_num_args (stmt);
13491 /* Verify the required arguments in the original call. */
13492 if (nargs < 4)
13493 return NULL_TREE;
13494 dest = gimple_call_arg (stmt, 0);
13495 if (!validate_arg (dest, POINTER_TYPE))
13496 return NULL_TREE;
13497 flag = gimple_call_arg (stmt, 1);
13498 if (!validate_arg (flag, INTEGER_TYPE))
13499 return NULL_TREE;
13500 size = gimple_call_arg (stmt, 2);
13501 if (!validate_arg (size, INTEGER_TYPE))
13502 return NULL_TREE;
13503 fmt = gimple_call_arg (stmt, 3);
13504 if (!validate_arg (fmt, POINTER_TYPE))
13505 return NULL_TREE;
13507 if (! host_integerp (size, 1))
13508 return NULL_TREE;
13510 len = NULL_TREE;
13512 if (!init_target_chars ())
13513 return NULL_TREE;
13515 /* Check whether the format is a literal string constant. */
13516 fmt_str = c_getstr (fmt);
13517 if (fmt_str != NULL)
13519 /* If the format doesn't contain % args or %%, we know the size. */
13520 if (strchr (fmt_str, target_percent) == 0)
13522 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13523 len = build_int_cstu (size_type_node, strlen (fmt_str));
13525 /* If the format is "%s" and the first ... argument is a string literal,
13526 we know the size too. */
13527 else if (fcode == BUILT_IN_SPRINTF_CHK
13528 && strcmp (fmt_str, target_percent_s) == 0)
13530 tree arg;
13532 if (nargs == 5)
13534 arg = gimple_call_arg (stmt, 4);
13535 if (validate_arg (arg, POINTER_TYPE))
13537 len = c_strlen (arg, 1);
13538 if (! len || ! host_integerp (len, 1))
13539 len = NULL_TREE;
13545 if (! integer_all_onesp (size))
13547 if (! len || ! tree_int_cst_lt (len, size))
13548 return NULL_TREE;
13551 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13552 or if format doesn't contain % chars or is "%s". */
13553 if (! integer_zerop (flag))
13555 if (fmt_str == NULL)
13556 return NULL_TREE;
13557 if (strchr (fmt_str, target_percent) != NULL
13558 && strcmp (fmt_str, target_percent_s))
13559 return NULL_TREE;
13562 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13563 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13564 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13565 if (!fn)
13566 return NULL_TREE;
13568 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
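/* A sketch of the effect for a fixed-size buffer BUF:

       __builtin___sprintf_chk (buf, 0, sizeof buf, "hello")
         =>  sprintf (buf, "hello")
       __builtin___sprintf_chk (buf, 0, sizeof buf, "%s", "hi")
         =>  sprintf (buf, "%s", "hi")

   The length check is skipped when the object size is unknown (all
   ones); when FLAG is nonzero only %-free formats and "%s" are
   rewritten.  */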
13571 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13572 a normal call should be emitted rather than expanding the function
13573 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13574 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
13575 passed as the second argument. */
13577 tree
13578 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13579 enum built_in_function fcode)
13581 tree dest, size, len, fn, fmt, flag;
13582 const char *fmt_str;
13584 /* Verify the required arguments in the original call. */
13585 if (gimple_call_num_args (stmt) < 5)
13586 return NULL_TREE;
13587 dest = gimple_call_arg (stmt, 0);
13588 if (!validate_arg (dest, POINTER_TYPE))
13589 return NULL_TREE;
13590 len = gimple_call_arg (stmt, 1);
13591 if (!validate_arg (len, INTEGER_TYPE))
13592 return NULL_TREE;
13593 flag = gimple_call_arg (stmt, 2);
13594 if (!validate_arg (flag, INTEGER_TYPE))
13595 return NULL_TREE;
13596 size = gimple_call_arg (stmt, 3);
13597 if (!validate_arg (size, INTEGER_TYPE))
13598 return NULL_TREE;
13599 fmt = gimple_call_arg (stmt, 4);
13600 if (!validate_arg (fmt, POINTER_TYPE))
13601 return NULL_TREE;
13603 if (! host_integerp (size, 1))
13604 return NULL_TREE;
13606 if (! integer_all_onesp (size))
13608 if (! host_integerp (len, 1))
13610 /* If LEN is not constant, try MAXLEN too.
13611 For MAXLEN only allow optimizing into non-_ocs function
13612 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13613 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13614 return NULL_TREE;
13616 else
13617 maxlen = len;
13619 if (tree_int_cst_lt (size, maxlen))
13620 return NULL_TREE;
13623 if (!init_target_chars ())
13624 return NULL_TREE;
13626 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13627 or if format doesn't contain % chars or is "%s". */
13628 if (! integer_zerop (flag))
13630 fmt_str = c_getstr (fmt);
13631 if (fmt_str == NULL)
13632 return NULL_TREE;
13633 if (strchr (fmt_str, target_percent) != NULL
13634 && strcmp (fmt_str, target_percent_s))
13635 return NULL_TREE;
13638 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13639 available. */
13640 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13641 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13642 if (!fn)
13643 return NULL_TREE;
13645 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
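/* Likewise for the snprintf case:

       __builtin___snprintf_chk (buf, n, 0, bufsize, "%s", s)
         =>  snprintf (buf, n, "%s", s)

   which only fires when BUFSIZE is unknown (all ones) or provably >= the
   length limit (N, or MAXLEN when N is not a constant).  */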
13648 /* Builtins with folding operations that operate on "..." arguments
13649 need special handling; we need to store the arguments in a convenient
13650 data structure before attempting any folding. Fortunately there are
13651 only a few builtins that fall into this category. FNDECL is the
13652 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13653 result of the function call is ignored. */
13655 static tree
13656 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13658 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13659 tree ret = NULL_TREE;
13661 switch (fcode)
13663 case BUILT_IN_SPRINTF_CHK:
13664 case BUILT_IN_VSPRINTF_CHK:
13665 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13666 break;
13668 case BUILT_IN_SNPRINTF_CHK:
13669 case BUILT_IN_VSNPRINTF_CHK:
13670 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13672 default:
13673 break;
13675 if (ret)
13677 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13678 TREE_NO_WARNING (ret) = 1;
13679 return ret;
13681 return NULL_TREE;
13684 /* A wrapper function for builtin folding that prevents warnings for
13685 "statement without effect" and the like, caused by removing the
13686 call node earlier than the warning is generated. */
13688 tree
13689 fold_call_stmt (gimple stmt, bool ignore)
13691 tree ret = NULL_TREE;
13692 tree fndecl = gimple_call_fndecl (stmt);
13693 if (fndecl
13694 && TREE_CODE (fndecl) == FUNCTION_DECL
13695 && DECL_BUILT_IN (fndecl)
13696 && !gimple_call_va_arg_pack_p (stmt))
13698 int nargs = gimple_call_num_args (stmt);
13700 if (avoid_folding_inline_builtin (fndecl))
13701 return NULL_TREE;
13702 /* FIXME: Don't use a list in this interface. */
13703 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13705 tree arglist = NULL_TREE;
13706 int i;
13707 for (i = nargs - 1; i >= 0; i--)
13708 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13709 return targetm.fold_builtin (fndecl, arglist, ignore);
13711 else
13713 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13715 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13716 int i;
13717 for (i = 0; i < nargs; i++)
13718 args[i] = gimple_call_arg (stmt, i);
13719 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13721 if (!ret)
13722 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13723 if (ret)
13725 /* Propagate location information from original call to
13726 expansion of builtin. Otherwise things like
13727 maybe_emit_chk_warning, that operate on the expansion
13728 of a builtin, will use the wrong location information. */
13729 if (gimple_has_location (stmt))
13731 tree realret = ret;
13732 if (TREE_CODE (ret) == NOP_EXPR)
13733 realret = TREE_OPERAND (ret, 0);
13734 if (CAN_HAVE_LOCATION_P (realret)
13735 && !EXPR_HAS_LOCATION (realret))
13736 SET_EXPR_LOCATION (realret, gimple_location (stmt));
13737 return realret;
13739 return ret;
13743 return NULL_TREE;