2009-01-28 Paul Thomas <pault@gcc.gnu.org>
[official-gcc.git] / gcc / builtins.c
blob57cce08abe53a5b02d28349ad594a361b380b8d1
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
62 /* Define the names of the builtin function types and codes. */
63 const char *const built_in_class_names[4]
64 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
67 const char * built_in_names[(int) END_BUILTINS] =
69 #include "builtins.def"
71 #undef DEF_BUILTIN
73 /* Setup an array of _DECL trees, make sure each element is
74 initialized to NULL_TREE. */
75 tree built_in_decls[(int) END_BUILTINS];
76 /* Declarations used when constructing the builtin implicitly in the compiler.
77 It may be NULL_TREE when this is invalid (for instance runtime is not
78 required to implement the function call in all cases). */
79 tree implicit_built_in_decls[(int) END_BUILTINS];
81 static const char *c_getstr (tree);
82 static rtx c_readstr (const char *, enum machine_mode);
83 static int target_char_cast (tree, char *);
84 static rtx get_memory_rtx (tree, tree);
85 static int apply_args_size (void);
86 static int apply_result_size (void);
87 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
88 static rtx result_vector (int, rtx);
89 #endif
90 static void expand_builtin_update_setjmp_buf (rtx);
91 static void expand_builtin_prefetch (tree);
92 static rtx expand_builtin_apply_args (void);
93 static rtx expand_builtin_apply_args_1 (void);
94 static rtx expand_builtin_apply (rtx, rtx, rtx);
95 static void expand_builtin_return (rtx);
96 static enum type_class type_to_class (tree);
97 static rtx expand_builtin_classify_type (tree);
98 static void expand_errno_check (tree, rtx);
99 static rtx expand_builtin_mathfn (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_args_info (tree);
108 static rtx expand_builtin_next_arg (void);
109 static rtx expand_builtin_va_start (tree);
110 static rtx expand_builtin_va_end (tree);
111 static rtx expand_builtin_va_copy (tree);
112 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
124 enum machine_mode, int);
125 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
126 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_bcopy (tree, int);
129 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
130 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
131 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
133 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
134 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
136 static rtx expand_builtin_bzero (tree);
137 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
142 static rtx expand_builtin_alloca (tree, rtx);
143 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
144 static rtx expand_builtin_frame_address (tree, tree);
145 static rtx expand_builtin_fputs (tree, rtx, bool);
146 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
147 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
148 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
149 static tree stabilize_va_list (tree, int);
150 static rtx expand_builtin_expect (tree, rtx);
151 static tree fold_builtin_constant_p (tree);
152 static tree fold_builtin_expect (tree, tree);
153 static tree fold_builtin_classify_type (tree);
154 static tree fold_builtin_strlen (tree);
155 static tree fold_builtin_inf (tree, int);
156 static tree fold_builtin_nan (tree, tree, int);
157 static tree rewrite_call_expr (tree, int, tree, int, ...);
158 static bool validate_arg (const_tree, enum tree_code code);
159 static bool integer_valued_real_p (tree);
160 static tree fold_trunc_transparent_mathfn (tree, tree);
161 static bool readonly_data_expr (tree);
162 static rtx expand_builtin_fabs (tree, rtx, rtx);
163 static rtx expand_builtin_signbit (tree, rtx);
164 static tree fold_builtin_sqrt (tree, tree);
165 static tree fold_builtin_cbrt (tree, tree);
166 static tree fold_builtin_pow (tree, tree, tree, tree);
167 static tree fold_builtin_powi (tree, tree, tree, tree);
168 static tree fold_builtin_cos (tree, tree, tree);
169 static tree fold_builtin_cosh (tree, tree, tree);
170 static tree fold_builtin_tan (tree, tree);
171 static tree fold_builtin_trunc (tree, tree);
172 static tree fold_builtin_floor (tree, tree);
173 static tree fold_builtin_ceil (tree, tree);
174 static tree fold_builtin_round (tree, tree);
175 static tree fold_builtin_int_roundingfn (tree, tree);
176 static tree fold_builtin_bitop (tree, tree);
177 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
178 static tree fold_builtin_strchr (tree, tree, tree);
179 static tree fold_builtin_memchr (tree, tree, tree, tree);
180 static tree fold_builtin_memcmp (tree, tree, tree);
181 static tree fold_builtin_strcmp (tree, tree);
182 static tree fold_builtin_strncmp (tree, tree, tree);
183 static tree fold_builtin_signbit (tree, tree);
184 static tree fold_builtin_copysign (tree, tree, tree, tree);
185 static tree fold_builtin_isascii (tree);
186 static tree fold_builtin_toascii (tree);
187 static tree fold_builtin_isdigit (tree);
188 static tree fold_builtin_fabs (tree, tree);
189 static tree fold_builtin_abs (tree, tree);
190 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
191 enum tree_code);
192 static tree fold_builtin_n (tree, tree *, int, bool);
193 static tree fold_builtin_0 (tree, bool);
194 static tree fold_builtin_1 (tree, tree, bool);
195 static tree fold_builtin_2 (tree, tree, tree, bool);
196 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
197 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
198 static tree fold_builtin_varargs (tree, tree, bool);
200 static tree fold_builtin_strpbrk (tree, tree, tree);
201 static tree fold_builtin_strstr (tree, tree, tree);
202 static tree fold_builtin_strrchr (tree, tree, tree);
203 static tree fold_builtin_strcat (tree, tree);
204 static tree fold_builtin_strncat (tree, tree, tree);
205 static tree fold_builtin_strspn (tree, tree);
206 static tree fold_builtin_strcspn (tree, tree);
207 static tree fold_builtin_sprintf (tree, tree, tree, int);
209 static rtx expand_builtin_object_size (tree);
210 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
211 enum built_in_function);
212 static void maybe_emit_chk_warning (tree, enum built_in_function);
213 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
214 static void maybe_emit_free_warning (tree);
215 static tree fold_builtin_object_size (tree, tree);
216 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
217 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
218 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
219 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
220 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
221 enum built_in_function);
222 static bool init_target_chars (void);
224 static unsigned HOST_WIDE_INT target_newline;
225 static unsigned HOST_WIDE_INT target_percent;
226 static unsigned HOST_WIDE_INT target_c;
227 static unsigned HOST_WIDE_INT target_s;
228 static char target_percent_c[3];
229 static char target_percent_s[3];
230 static char target_percent_s_newline[4];
231 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
232 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
233 static tree do_mpfr_arg2 (tree, tree, tree,
234 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
235 static tree do_mpfr_arg3 (tree, tree, tree, tree,
236 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
237 static tree do_mpfr_sincos (tree, tree, tree);
238 static tree do_mpfr_bessel_n (tree, tree, tree,
239 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
240 const REAL_VALUE_TYPE *, bool);
241 static tree do_mpfr_remquo (tree, tree, tree);
242 static tree do_mpfr_lgamma_r (tree, tree, tree);
244 /* Return true if NODE should be considered for inline expansion regardless
245 of the optimization level. This means whenever a function is invoked with
246 its "internal" name, which normally contains the prefix "__builtin". */
248 static bool called_as_built_in (tree node)
250 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
251 if (strncmp (name, "__builtin_", 10) == 0)
252 return true;
253 if (strncmp (name, "__sync_", 7) == 0)
254 return true;
255 return false;
258 /* Return the alignment in bits of EXP, an object.
259 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
260 guessed alignment e.g. from type alignment. */
263 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
265 unsigned int inner;
267 inner = max_align;
268 if (handled_component_p (exp))
270 HOST_WIDE_INT bitsize, bitpos;
271 tree offset;
272 enum machine_mode mode;
273 int unsignedp, volatilep;
275 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
276 &mode, &unsignedp, &volatilep, true);
277 if (bitpos)
278 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
279 while (offset)
281 tree next_offset;
283 if (TREE_CODE (offset) == PLUS_EXPR)
285 next_offset = TREE_OPERAND (offset, 0);
286 offset = TREE_OPERAND (offset, 1);
288 else
289 next_offset = NULL;
290 if (host_integerp (offset, 1))
292 /* Any overflow in calculating offset_bits won't change
293 the alignment. */
294 unsigned offset_bits
295 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
297 if (offset_bits)
298 inner = MIN (inner, (offset_bits & -offset_bits));
300 else if (TREE_CODE (offset) == MULT_EXPR
301 && host_integerp (TREE_OPERAND (offset, 1), 1))
303 /* Any overflow in calculating offset_factor won't change
304 the alignment. */
305 unsigned offset_factor
306 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
307 * BITS_PER_UNIT);
309 if (offset_factor)
310 inner = MIN (inner, (offset_factor & -offset_factor));
312 else
314 inner = MIN (inner, BITS_PER_UNIT);
315 break;
317 offset = next_offset;
320 if (DECL_P (exp))
321 align = MIN (inner, DECL_ALIGN (exp));
322 #ifdef CONSTANT_ALIGNMENT
323 else if (CONSTANT_CLASS_P (exp))
324 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
325 #endif
326 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
327 || TREE_CODE (exp) == INDIRECT_REF)
328 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
329 else
330 align = MIN (align, inner);
331 return MIN (align, max_align);
334 /* Return the alignment in bits of EXP, a pointer valued expression.
335 But don't return more than MAX_ALIGN no matter what.
336 The alignment returned is, by default, the alignment of the thing that
337 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
339 Otherwise, look at the expression to see if we can do better, i.e., if the
340 expression is actually pointing at an object whose alignment is tighter. */
343 get_pointer_alignment (tree exp, unsigned int max_align)
345 unsigned int align, inner;
347 /* We rely on TER to compute accurate alignment information. */
348 if (!(optimize && flag_tree_ter))
349 return 0;
351 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
352 return 0;
354 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
355 align = MIN (align, max_align);
357 while (1)
359 switch (TREE_CODE (exp))
361 CASE_CONVERT:
362 exp = TREE_OPERAND (exp, 0);
363 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
364 return align;
366 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
367 align = MIN (inner, max_align);
368 break;
370 case POINTER_PLUS_EXPR:
371 /* If sum of pointer + int, restrict our maximum alignment to that
372 imposed by the integer. If not, we can't do any better than
373 ALIGN. */
374 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
375 return align;
377 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
378 & (max_align / BITS_PER_UNIT - 1))
379 != 0)
380 max_align >>= 1;
382 exp = TREE_OPERAND (exp, 0);
383 break;
385 case ADDR_EXPR:
386 /* See what we are pointing at and look at its alignment. */
387 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
389 default:
390 return align;
395 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
396 way, because it could contain a zero byte in the middle.
397 TREE_STRING_LENGTH is the size of the character array, not the string.
399 ONLY_VALUE should be nonzero if the result is not going to be emitted
400 into the instruction stream and zero if it is going to be expanded.
401 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
402 is returned, otherwise NULL, since
403 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
404 evaluate the side-effects.
406 The value returned is of type `ssizetype'.
408 Unfortunately, string_constant can't access the values of const char
409 arrays with initializers, so neither can we do so here. */
411 tree
412 c_strlen (tree src, int only_value)
414 tree offset_node;
415 HOST_WIDE_INT offset;
416 int max;
417 const char *ptr;
419 STRIP_NOPS (src);
420 if (TREE_CODE (src) == COND_EXPR
421 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
423 tree len1, len2;
425 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
426 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
427 if (tree_int_cst_equal (len1, len2))
428 return len1;
431 if (TREE_CODE (src) == COMPOUND_EXPR
432 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
433 return c_strlen (TREE_OPERAND (src, 1), only_value);
435 src = string_constant (src, &offset_node);
436 if (src == 0)
437 return NULL_TREE;
439 max = TREE_STRING_LENGTH (src) - 1;
440 ptr = TREE_STRING_POINTER (src);
442 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
444 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
445 compute the offset to the following null if we don't know where to
446 start searching for it. */
447 int i;
449 for (i = 0; i < max; i++)
450 if (ptr[i] == 0)
451 return NULL_TREE;
453 /* We don't know the starting offset, but we do know that the string
454 has no internal zero bytes. We can assume that the offset falls
455 within the bounds of the string; otherwise, the programmer deserves
456 what he gets. Subtract the offset from the length of the string,
457 and return that. This would perhaps not be valid if we were dealing
458 with named arrays in addition to literal string constants. */
460 return size_diffop (size_int (max), offset_node);
463 /* We have a known offset into the string. Start searching there for
464 a null character if we can represent it as a single HOST_WIDE_INT. */
465 if (offset_node == 0)
466 offset = 0;
467 else if (! host_integerp (offset_node, 0))
468 offset = -1;
469 else
470 offset = tree_low_cst (offset_node, 0);
472 /* If the offset is known to be out of bounds, warn, and call strlen at
473 runtime. */
474 if (offset < 0 || offset > max)
476 /* Suppress multiple warnings for propagated constant strings. */
477 if (! TREE_NO_WARNING (src))
479 warning (0, "offset outside bounds of constant string");
480 TREE_NO_WARNING (src) = 1;
482 return NULL_TREE;
485 /* Use strlen to search for the first zero byte. Since any strings
486 constructed with build_string will have nulls appended, we win even
487 if we get handed something like (char[4])"abcd".
489 Since OFFSET is our starting index into the string, no further
490 calculation is needed. */
491 return ssize_int (strlen (ptr + offset));
494 /* Return a char pointer for a C string if it is a string constant
495 or sum of string constant and integer constant. */
497 static const char *
498 c_getstr (tree src)
500 tree offset_node;
502 src = string_constant (src, &offset_node);
503 if (src == 0)
504 return 0;
506 if (offset_node == 0)
507 return TREE_STRING_POINTER (src);
508 else if (!host_integerp (offset_node, 1)
509 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
510 return 0;
512 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
515 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
516 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
518 static rtx
519 c_readstr (const char *str, enum machine_mode mode)
521 HOST_WIDE_INT c[2];
522 HOST_WIDE_INT ch;
523 unsigned int i, j;
525 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
527 c[0] = 0;
528 c[1] = 0;
529 ch = 1;
530 for (i = 0; i < GET_MODE_SIZE (mode); i++)
532 j = i;
533 if (WORDS_BIG_ENDIAN)
534 j = GET_MODE_SIZE (mode) - i - 1;
535 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
536 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
537 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
538 j *= BITS_PER_UNIT;
539 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
541 if (ch)
542 ch = (unsigned char) str[i];
543 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
545 return immed_double_const (c[0], c[1], mode);
548 /* Cast a target constant CST to target CHAR and if that value fits into
549 host char type, return zero and put that value into variable pointed to by
550 P. */
552 static int
553 target_char_cast (tree cst, char *p)
555 unsigned HOST_WIDE_INT val, hostval;
557 if (!host_integerp (cst, 1)
558 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
559 return 1;
561 val = tree_low_cst (cst, 1);
562 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
563 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
565 hostval = val;
566 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
567 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
569 if (val != hostval)
570 return 1;
572 *p = hostval;
573 return 0;
576 /* Similar to save_expr, but assumes that arbitrary code is not executed
577 in between the multiple evaluations. In particular, we assume that a
578 non-addressable local variable will not be modified. */
580 static tree
581 builtin_save_expr (tree exp)
583 if (TREE_ADDRESSABLE (exp) == 0
584 && (TREE_CODE (exp) == PARM_DECL
585 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
586 return exp;
588 return save_expr (exp);
591 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
592 times to get the address of either a higher stack frame, or a return
593 address located within it (depending on FNDECL_CODE). */
595 static rtx
596 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
598 int i;
600 #ifdef INITIAL_FRAME_ADDRESS_RTX
601 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
602 #else
603 rtx tem;
605 /* For a zero count with __builtin_return_address, we don't care what
606 frame address we return, because target-specific definitions will
607 override us. Therefore frame pointer elimination is OK, and using
608 the soft frame pointer is OK.
610 For a nonzero count, or a zero count with __builtin_frame_address,
611 we require a stable offset from the current frame pointer to the
612 previous one, so we must use the hard frame pointer, and
613 we must disable frame pointer elimination. */
614 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
615 tem = frame_pointer_rtx;
616 else
618 tem = hard_frame_pointer_rtx;
620 /* Tell reload not to eliminate the frame pointer. */
621 crtl->accesses_prior_frames = 1;
623 #endif
625 /* Some machines need special handling before we can access
626 arbitrary frames. For example, on the SPARC, we must first flush
627 all register windows to the stack. */
628 #ifdef SETUP_FRAME_ADDRESSES
629 if (count > 0)
630 SETUP_FRAME_ADDRESSES ();
631 #endif
633 /* On the SPARC, the return address is not in the frame, it is in a
634 register. There is no way to access it off of the current frame
635 pointer, but it can be accessed off the previous frame pointer by
636 reading the value from the register window save area. */
637 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
638 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
639 count--;
640 #endif
642 /* Scan back COUNT frames to the specified frame. */
643 for (i = 0; i < count; i++)
645 /* Assume the dynamic chain pointer is in the word that the
646 frame address points to, unless otherwise specified. */
647 #ifdef DYNAMIC_CHAIN_ADDRESS
648 tem = DYNAMIC_CHAIN_ADDRESS (tem);
649 #endif
650 tem = memory_address (Pmode, tem);
651 tem = gen_frame_mem (Pmode, tem);
652 tem = copy_to_reg (tem);
655 /* For __builtin_frame_address, return what we've got. But, on
656 the SPARC for example, we may have to add a bias. */
657 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
658 #ifdef FRAME_ADDR_RTX
659 return FRAME_ADDR_RTX (tem);
660 #else
661 return tem;
662 #endif
664 /* For __builtin_return_address, get the return address from that frame. */
665 #ifdef RETURN_ADDR_RTX
666 tem = RETURN_ADDR_RTX (count, tem);
667 #else
668 tem = memory_address (Pmode,
669 plus_constant (tem, GET_MODE_SIZE (Pmode)));
670 tem = gen_frame_mem (Pmode, tem);
671 #endif
672 return tem;
675 /* Alias set used for setjmp buffer. */
676 static alias_set_type setjmp_alias_set = -1;
678 /* Construct the leading half of a __builtin_setjmp call. Control will
679 return to RECEIVER_LABEL. This is also called directly by the SJLJ
680 exception handling code. */
682 void
683 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
685 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
686 rtx stack_save;
687 rtx mem;
689 if (setjmp_alias_set == -1)
690 setjmp_alias_set = new_alias_set ();
692 buf_addr = convert_memory_address (Pmode, buf_addr);
694 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
696 /* We store the frame pointer and the address of receiver_label in
697 the buffer and use the rest of it for the stack save area, which
698 is machine-dependent. */
700 mem = gen_rtx_MEM (Pmode, buf_addr);
701 set_mem_alias_set (mem, setjmp_alias_set);
702 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
704 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
705 set_mem_alias_set (mem, setjmp_alias_set);
707 emit_move_insn (validize_mem (mem),
708 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
710 stack_save = gen_rtx_MEM (sa_mode,
711 plus_constant (buf_addr,
712 2 * GET_MODE_SIZE (Pmode)));
713 set_mem_alias_set (stack_save, setjmp_alias_set);
714 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
716 /* If there is further processing to do, do it. */
717 #ifdef HAVE_builtin_setjmp_setup
718 if (HAVE_builtin_setjmp_setup)
719 emit_insn (gen_builtin_setjmp_setup (buf_addr));
720 #endif
722 /* Tell optimize_save_area_alloca that extra work is going to
723 need to go on during alloca. */
724 cfun->calls_setjmp = 1;
726 /* We have a nonlocal label. */
727 cfun->has_nonlocal_label = 1;
730 /* Construct the trailing part of a __builtin_setjmp call. This is
731 also called directly by the SJLJ exception handling code. */
733 void
734 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
736 /* Clobber the FP when we get here, so we have to make sure it's
737 marked as used by this function. */
738 emit_use (hard_frame_pointer_rtx);
740 /* Mark the static chain as clobbered here so life information
741 doesn't get messed up for it. */
742 emit_clobber (static_chain_rtx);
744 /* Now put in the code to restore the frame pointer, and argument
745 pointer, if needed. */
746 #ifdef HAVE_nonlocal_goto
747 if (! HAVE_nonlocal_goto)
748 #endif
750 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
751 /* This might change the hard frame pointer in ways that aren't
752 apparent to early optimization passes, so force a clobber. */
753 emit_clobber (hard_frame_pointer_rtx);
756 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
757 if (fixed_regs[ARG_POINTER_REGNUM])
759 #ifdef ELIMINABLE_REGS
760 size_t i;
761 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
763 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
764 if (elim_regs[i].from == ARG_POINTER_REGNUM
765 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
766 break;
768 if (i == ARRAY_SIZE (elim_regs))
769 #endif
771 /* Now restore our arg pointer from the address at which it
772 was saved in our stack frame. */
773 emit_move_insn (crtl->args.internal_arg_pointer,
774 copy_to_reg (get_arg_pointer_save_area ()));
777 #endif
779 #ifdef HAVE_builtin_setjmp_receiver
780 if (HAVE_builtin_setjmp_receiver)
781 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
782 else
783 #endif
784 #ifdef HAVE_nonlocal_goto_receiver
785 if (HAVE_nonlocal_goto_receiver)
786 emit_insn (gen_nonlocal_goto_receiver ());
787 else
788 #endif
789 { /* Nothing */ }
791 /* We must not allow the code we just generated to be reordered by
792 scheduling. Specifically, the update of the frame pointer must
793 happen immediately, not later. */
794 emit_insn (gen_blockage ());
797 /* __builtin_longjmp is passed a pointer to an array of five words (not
798 all will be used on all machines). It operates similarly to the C
799 library function of the same name, but is more efficient. Much of
800 the code below is copied from the handling of non-local gotos. */
802 static void
803 expand_builtin_longjmp (rtx buf_addr, rtx value)
805 rtx fp, lab, stack, insn, last;
806 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
808 /* DRAP is needed for stack realign if longjmp is expanded to current
809 function */
810 if (SUPPORTS_STACK_ALIGNMENT)
811 crtl->need_drap = true;
813 if (setjmp_alias_set == -1)
814 setjmp_alias_set = new_alias_set ();
816 buf_addr = convert_memory_address (Pmode, buf_addr);
818 buf_addr = force_reg (Pmode, buf_addr);
820 /* We used to store value in static_chain_rtx, but that fails if pointers
821 are smaller than integers. We instead require that the user must pass
822 a second argument of 1, because that is what builtin_setjmp will
823 return. This also makes EH slightly more efficient, since we are no
824 longer copying around a value that we don't care about. */
825 gcc_assert (value == const1_rtx);
827 last = get_last_insn ();
828 #ifdef HAVE_builtin_longjmp
829 if (HAVE_builtin_longjmp)
830 emit_insn (gen_builtin_longjmp (buf_addr));
831 else
832 #endif
834 fp = gen_rtx_MEM (Pmode, buf_addr);
835 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
836 GET_MODE_SIZE (Pmode)));
838 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
839 2 * GET_MODE_SIZE (Pmode)));
840 set_mem_alias_set (fp, setjmp_alias_set);
841 set_mem_alias_set (lab, setjmp_alias_set);
842 set_mem_alias_set (stack, setjmp_alias_set);
844 /* Pick up FP, label, and SP from the block and jump. This code is
845 from expand_goto in stmt.c; see there for detailed comments. */
846 #ifdef HAVE_nonlocal_goto
847 if (HAVE_nonlocal_goto)
848 /* We have to pass a value to the nonlocal_goto pattern that will
849 get copied into the static_chain pointer, but it does not matter
850 what that value is, because builtin_setjmp does not use it. */
851 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
852 else
853 #endif
855 lab = copy_to_reg (lab);
857 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
858 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
860 emit_move_insn (hard_frame_pointer_rtx, fp);
861 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
863 emit_use (hard_frame_pointer_rtx);
864 emit_use (stack_pointer_rtx);
865 emit_indirect_jump (lab);
869 /* Search backwards and mark the jump insn as a non-local goto.
870 Note that this precludes the use of __builtin_longjmp to a
871 __builtin_setjmp target in the same function. However, we've
872 already cautioned the user that these functions are for
873 internal exception handling use only. */
874 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
876 gcc_assert (insn != last);
878 if (JUMP_P (insn))
880 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
881 break;
883 else if (CALL_P (insn))
884 break;
888 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
889 and the address of the save area. */
891 static rtx
892 expand_builtin_nonlocal_goto (tree exp)
894 tree t_label, t_save_area;
895 rtx r_label, r_save_area, r_fp, r_sp, insn;
897 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
898 return NULL_RTX;
900 t_label = CALL_EXPR_ARG (exp, 0);
901 t_save_area = CALL_EXPR_ARG (exp, 1);
903 r_label = expand_normal (t_label);
904 r_label = convert_memory_address (Pmode, r_label);
905 r_save_area = expand_normal (t_save_area);
906 r_save_area = convert_memory_address (Pmode, r_save_area);
907 /* Copy the address of the save location to a register just in case it was based
908 on the frame pointer. */
909 r_save_area = copy_to_reg (r_save_area);
910 r_fp = gen_rtx_MEM (Pmode, r_save_area);
911 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
912 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
914 crtl->has_nonlocal_goto = 1;
916 #ifdef HAVE_nonlocal_goto
917 /* ??? We no longer need to pass the static chain value, afaik. */
918 if (HAVE_nonlocal_goto)
919 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
920 else
921 #endif
923 r_label = copy_to_reg (r_label);
925 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
926 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
928 /* Restore frame pointer for containing function.
929 This sets the actual hard register used for the frame pointer
930 to the location of the function's incoming static chain info.
931 The non-local goto handler will then adjust it to contain the
932 proper value and reload the argument pointer, if needed. */
933 emit_move_insn (hard_frame_pointer_rtx, r_fp);
934 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
936 /* USE of hard_frame_pointer_rtx added for consistency;
937 not clear if really needed. */
938 emit_use (hard_frame_pointer_rtx);
939 emit_use (stack_pointer_rtx);
941 /* If the architecture is using a GP register, we must
942 conservatively assume that the target function makes use of it.
943 The prologue of functions with nonlocal gotos must therefore
944 initialize the GP register to the appropriate value, and we
945 must then make sure that this value is live at the point
946 of the jump. (Note that this doesn't necessarily apply
947 to targets with a nonlocal_goto pattern; they are free
948 to implement it in their own way. Note also that this is
949 a no-op if the GP register is a global invariant.) */
950 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
951 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
952 emit_use (pic_offset_table_rtx);
954 emit_indirect_jump (r_label);
957 /* Search backwards to the jump insn and mark it as a
958 non-local goto. */
959 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
961 if (JUMP_P (insn))
963 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
964 break;
966 else if (CALL_P (insn))
967 break;
970 return const0_rtx;
973 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
974 (not all will be used on all machines) that was passed to __builtin_setjmp.
975 It updates the stack pointer in that block to correspond to the current
976 stack pointer. */
978 static void
979 expand_builtin_update_setjmp_buf (rtx buf_addr)
981 enum machine_mode sa_mode = Pmode;
982 rtx stack_save;
985 #ifdef HAVE_save_stack_nonlocal
986 if (HAVE_save_stack_nonlocal)
987 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
988 #endif
989 #ifdef STACK_SAVEAREA_MODE
990 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
991 #endif
993 stack_save
994 = gen_rtx_MEM (sa_mode,
995 memory_address
996 (sa_mode,
997 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
999 #ifdef HAVE_setjmp
1000 if (HAVE_setjmp)
1001 emit_insn (gen_setjmp ());
1002 #endif
1004 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1007 /* Expand a call to __builtin_prefetch. For a target that does not support
1008 data prefetch, evaluate the memory address argument in case it has side
1009 effects. */
1011 static void
1012 expand_builtin_prefetch (tree exp)
1014 tree arg0, arg1, arg2;
1015 int nargs;
1016 rtx op0, op1, op2;
1018 if (!validate_arglist (exp, POINTER_TYPE, 0))
1019 return;
1021 arg0 = CALL_EXPR_ARG (exp, 0);
1023 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1024 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1025 locality). */
1026 nargs = call_expr_nargs (exp);
1027 if (nargs > 1)
1028 arg1 = CALL_EXPR_ARG (exp, 1);
1029 else
1030 arg1 = integer_zero_node;
1031 if (nargs > 2)
1032 arg2 = CALL_EXPR_ARG (exp, 2);
1033 else
1034 arg2 = build_int_cst (NULL_TREE, 3);
1036 /* Argument 0 is an address. */
1037 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1039 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1040 if (TREE_CODE (arg1) != INTEGER_CST)
1042 error ("second argument to %<__builtin_prefetch%> must be a constant");
1043 arg1 = integer_zero_node;
1045 op1 = expand_normal (arg1);
1046 /* Argument 1 must be either zero or one. */
1047 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1049 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1050 " using zero");
1051 op1 = const0_rtx;
1054 /* Argument 2 (locality) must be a compile-time constant int. */
1055 if (TREE_CODE (arg2) != INTEGER_CST)
1057 error ("third argument to %<__builtin_prefetch%> must be a constant");
1058 arg2 = integer_zero_node;
1060 op2 = expand_normal (arg2);
1061 /* Argument 2 must be 0, 1, 2, or 3. */
1062 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1064 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1065 op2 = const0_rtx;
1068 #ifdef HAVE_prefetch
1069 if (HAVE_prefetch)
1071 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1072 (op0,
1073 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1074 || (GET_MODE (op0) != Pmode))
1076 op0 = convert_memory_address (Pmode, op0);
1077 op0 = force_reg (Pmode, op0);
1079 emit_insn (gen_prefetch (op0, op1, op2));
1081 #endif
1083 /* Don't do anything with direct references to volatile memory, but
1084 generate code to handle other side effects. */
1085 if (!MEM_P (op0) && side_effects_p (op0))
1086 emit_insn (op0);
1089 /* Get a MEM rtx for expression EXP which is the address of an operand
1090 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1091 the maximum length of the block of memory that might be accessed or
1092 NULL if unknown. */
1094 static rtx
1095 get_memory_rtx (tree exp, tree len)
1097 tree orig_exp = exp;
1098 rtx addr, mem;
1099 HOST_WIDE_INT off;
1101 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1102 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1103 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1104 exp = TREE_OPERAND (exp, 0);
1106 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1107 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1109 /* Get an expression we can use to find the attributes to assign to MEM.
1110 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1111 we can. First remove any nops. */
1112 while (CONVERT_EXPR_P (exp)
1113 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1114 exp = TREE_OPERAND (exp, 0);
1116 off = 0;
1117 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1118 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1119 && host_integerp (TREE_OPERAND (exp, 1), 0)
1120 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1121 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1122 else if (TREE_CODE (exp) == ADDR_EXPR)
1123 exp = TREE_OPERAND (exp, 0);
1124 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1125 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1126 else
1127 exp = NULL;
1129 /* Honor attributes derived from exp, except for the alias set
1130 (as builtin stringops may alias with anything) and the size
1131 (as stringops may access multiple array elements). */
1132 if (exp)
1134 set_mem_attributes (mem, exp, 0);
1136 if (off)
1137 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1139 /* Allow the string and memory builtins to overflow from one
1140 field into another, see http://gcc.gnu.org/PR23561.
1141 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1142 memory accessed by the string or memory builtin will fit
1143 within the field. */
1144 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1146 tree mem_expr = MEM_EXPR (mem);
1147 HOST_WIDE_INT offset = -1, length = -1;
1148 tree inner = exp;
1150 while (TREE_CODE (inner) == ARRAY_REF
1151 || CONVERT_EXPR_P (inner)
1152 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1153 || TREE_CODE (inner) == SAVE_EXPR)
1154 inner = TREE_OPERAND (inner, 0);
1156 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1158 if (MEM_OFFSET (mem)
1159 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1160 offset = INTVAL (MEM_OFFSET (mem));
1162 if (offset >= 0 && len && host_integerp (len, 0))
1163 length = tree_low_cst (len, 0);
1165 while (TREE_CODE (inner) == COMPONENT_REF)
1167 tree field = TREE_OPERAND (inner, 1);
1168 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1169 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1171 /* Bitfields are generally not byte-addressable. */
1172 gcc_assert (!DECL_BIT_FIELD (field)
1173 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1174 % BITS_PER_UNIT) == 0
1175 && host_integerp (DECL_SIZE (field), 0)
1176 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1177 % BITS_PER_UNIT) == 0));
1179 /* If we can prove that the memory starting at XEXP (mem, 0) and
1180 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1181 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1182 fields without DECL_SIZE_UNIT like flexible array members. */
1183 if (length >= 0
1184 && DECL_SIZE_UNIT (field)
1185 && host_integerp (DECL_SIZE_UNIT (field), 0))
1187 HOST_WIDE_INT size
1188 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1189 if (offset <= size
1190 && length <= size
1191 && offset + length <= size)
1192 break;
1195 if (offset >= 0
1196 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1197 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1198 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1199 / BITS_PER_UNIT;
1200 else
1202 offset = -1;
1203 length = -1;
1206 mem_expr = TREE_OPERAND (mem_expr, 0);
1207 inner = TREE_OPERAND (inner, 0);
1210 if (mem_expr == NULL)
1211 offset = -1;
1212 if (mem_expr != MEM_EXPR (mem))
1214 set_mem_expr (mem, mem_expr);
1215 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1218 set_mem_alias_set (mem, 0);
1219 set_mem_size (mem, NULL_RTX);
1222 return mem;
1225 /* Built-in functions to perform an untyped call and return. */
1227 /* For each register that may be used for calling a function, this
1228 gives a mode used to copy the register's value. VOIDmode indicates
1229 the register is not used for calling a function. If the machine
1230 has register windows, this gives only the outbound registers.
1231 INCOMING_REGNO gives the corresponding inbound register. */
1232 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1234 /* For each register that may be used for returning values, this gives
1235 a mode used to copy the register's value. VOIDmode indicates the
1236 register is not used for returning values. If the machine has
1237 register windows, this gives only the outbound registers.
1238 INCOMING_REGNO gives the corresponding inbound register. */
1239 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1241 /* For each register that may be used for calling a function, this
1242 gives the offset of that register into the block returned by
1243 __builtin_apply_args. 0 indicates that the register is not
1244 used for calling a function. */
1245 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1247 /* Return the size required for the block returned by __builtin_apply_args,
1248 and initialize apply_args_mode. */
1250 static int
1251 apply_args_size (void)
1253 static int size = -1;
1254 int align;
1255 unsigned int regno;
1256 enum machine_mode mode;
1258 /* The values computed by this function never change. */
1259 if (size < 0)
1261 /* The first value is the incoming arg-pointer. */
1262 size = GET_MODE_SIZE (Pmode);
1264 /* The second value is the structure value address unless this is
1265 passed as an "invisible" first argument. */
1266 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1267 size += GET_MODE_SIZE (Pmode);
1269 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1270 if (FUNCTION_ARG_REGNO_P (regno))
1272 mode = reg_raw_mode[regno];
1274 gcc_assert (mode != VOIDmode);
1276 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1277 if (size % align != 0)
1278 size = CEIL (size, align) * align;
1279 apply_args_reg_offset[regno] = size;
1280 size += GET_MODE_SIZE (mode);
1281 apply_args_mode[regno] = mode;
1283 else
1285 apply_args_mode[regno] = VOIDmode;
1286 apply_args_reg_offset[regno] = 0;
1289 return size;
1292 /* Return the size required for the block returned by __builtin_apply,
1293 and initialize apply_result_mode. */
1295 static int
1296 apply_result_size (void)
1298 static int size = -1;
1299 int align, regno;
1300 enum machine_mode mode;
1302 /* The values computed by this function never change. */
1303 if (size < 0)
1305 size = 0;
1307 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1308 if (FUNCTION_VALUE_REGNO_P (regno))
1310 mode = reg_raw_mode[regno];
1312 gcc_assert (mode != VOIDmode);
1314 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1315 if (size % align != 0)
1316 size = CEIL (size, align) * align;
1317 size += GET_MODE_SIZE (mode);
1318 apply_result_mode[regno] = mode;
1320 else
1321 apply_result_mode[regno] = VOIDmode;
1323 /* Allow targets that use untyped_call and untyped_return to override
1324 the size so that machine-specific information can be stored here. */
1325 #ifdef APPLY_RESULT_SIZE
1326 size = APPLY_RESULT_SIZE;
1327 #endif
1329 return size;
1332 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1333 /* Create a vector describing the result block RESULT. If SAVEP is true,
1334 the result block is used to save the values; otherwise it is used to
1335 restore the values. */
1337 static rtx
1338 result_vector (int savep, rtx result)
1340 int regno, size, align, nelts;
1341 enum machine_mode mode;
1342 rtx reg, mem;
1343 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1345 size = nelts = 0;
1346 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1347 if ((mode = apply_result_mode[regno]) != VOIDmode)
1349 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1350 if (size % align != 0)
1351 size = CEIL (size, align) * align;
1352 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1353 mem = adjust_address (result, mode, size);
1354 savevec[nelts++] = (savep
1355 ? gen_rtx_SET (VOIDmode, mem, reg)
1356 : gen_rtx_SET (VOIDmode, reg, mem));
1357 size += GET_MODE_SIZE (mode);
1359 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1361 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1363 /* Save the state required to perform an untyped call with the same
1364 arguments as were passed to the current function. */
1366 static rtx
1367 expand_builtin_apply_args_1 (void)
1369 rtx registers, tem;
1370 int size, align, regno;
1371 enum machine_mode mode;
1372 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1374 /* Create a block where the arg-pointer, structure value address,
1375 and argument registers can be saved. */
1376 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1378 /* Walk past the arg-pointer and structure value address. */
1379 size = GET_MODE_SIZE (Pmode);
1380 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1381 size += GET_MODE_SIZE (Pmode);
1383 /* Save each register used in calling a function to the block. */
1384 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1385 if ((mode = apply_args_mode[regno]) != VOIDmode)
1387 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1388 if (size % align != 0)
1389 size = CEIL (size, align) * align;
1391 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1393 emit_move_insn (adjust_address (registers, mode, size), tem);
1394 size += GET_MODE_SIZE (mode);
1397 /* Save the arg pointer to the block. */
1398 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1399 #ifdef STACK_GROWS_DOWNWARD
1400 /* We need the pointer as the caller actually passed them to us, not
1401 as we might have pretended they were passed. Make sure it's a valid
1402 operand, as emit_move_insn isn't expected to handle a PLUS. */
1404 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1405 NULL_RTX);
1406 #endif
1407 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1409 size = GET_MODE_SIZE (Pmode);
1411 /* Save the structure value address unless this is passed as an
1412 "invisible" first argument. */
1413 if (struct_incoming_value)
1415 emit_move_insn (adjust_address (registers, Pmode, size),
1416 copy_to_reg (struct_incoming_value));
1417 size += GET_MODE_SIZE (Pmode);
1420 /* Return the address of the block. */
1421 return copy_addr_to_reg (XEXP (registers, 0));
1424 /* __builtin_apply_args returns block of memory allocated on
1425 the stack into which is stored the arg pointer, structure
1426 value address, static chain, and all the registers that might
1427 possibly be used in performing a function call. The code is
1428 moved to the start of the function so the incoming values are
1429 saved. */
1431 static rtx
1432 expand_builtin_apply_args (void)
1434 /* Don't do __builtin_apply_args more than once in a function.
1435 Save the result of the first call and reuse it. */
1436 if (apply_args_value != 0)
1437 return apply_args_value;
1439 /* When this function is called, it means that registers must be
1440 saved on entry to this function. So we migrate the
1441 call to the first insn of this function. */
1442 rtx temp;
1443 rtx seq;
1445 start_sequence ();
1446 temp = expand_builtin_apply_args_1 ();
1447 seq = get_insns ();
1448 end_sequence ();
1450 apply_args_value = temp;
1452 /* Put the insns after the NOTE that starts the function.
1453 If this is inside a start_sequence, make the outer-level insn
1454 chain current, so the code is placed at the start of the
1455 function. If internal_arg_pointer is a non-virtual pseudo,
1456 it needs to be placed after the function that initializes
1457 that pseudo. */
1458 push_topmost_sequence ();
1459 if (REG_P (crtl->args.internal_arg_pointer)
1460 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1461 emit_insn_before (seq, parm_birth_insn);
1462 else
1463 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1464 pop_topmost_sequence ();
1465 return temp;
1469 /* Perform an untyped call and save the state required to perform an
1470 untyped return of whatever value was returned by the given function. */
1472 static rtx
1473 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1475 int size, align, regno;
1476 enum machine_mode mode;
1477 rtx incoming_args, result, reg, dest, src, call_insn;
1478 rtx old_stack_level = 0;
1479 rtx call_fusage = 0;
1480 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1482 arguments = convert_memory_address (Pmode, arguments);
1484 /* Create a block where the return registers can be saved. */
1485 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1487 /* Fetch the arg pointer from the ARGUMENTS block. */
1488 incoming_args = gen_reg_rtx (Pmode);
1489 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1490 #ifndef STACK_GROWS_DOWNWARD
1491 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1492 incoming_args, 0, OPTAB_LIB_WIDEN);
1493 #endif
1495 /* Push a new argument block and copy the arguments. Do not allow
1496 the (potential) memcpy call below to interfere with our stack
1497 manipulations. */
1498 do_pending_stack_adjust ();
1499 NO_DEFER_POP;
1501 /* Save the stack with nonlocal if available. */
1502 #ifdef HAVE_save_stack_nonlocal
1503 if (HAVE_save_stack_nonlocal)
1504 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1505 else
1506 #endif
1507 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1509 /* Allocate a block of memory onto the stack and copy the memory
1510 arguments to the outgoing arguments address. */
1511 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1513 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1514 may have already set current_function_calls_alloca to true.
1515 current_function_calls_alloca won't be set if argsize is zero,
1516 so we have to guarantee need_drap is true here. */
1517 if (SUPPORTS_STACK_ALIGNMENT)
1518 crtl->need_drap = true;
1520 dest = virtual_outgoing_args_rtx;
1521 #ifndef STACK_GROWS_DOWNWARD
1522 if (GET_CODE (argsize) == CONST_INT)
1523 dest = plus_constant (dest, -INTVAL (argsize));
1524 else
1525 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1526 #endif
1527 dest = gen_rtx_MEM (BLKmode, dest);
1528 set_mem_align (dest, PARM_BOUNDARY);
1529 src = gen_rtx_MEM (BLKmode, incoming_args);
1530 set_mem_align (src, PARM_BOUNDARY);
1531 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1533 /* Refer to the argument block. */
1534 apply_args_size ();
1535 arguments = gen_rtx_MEM (BLKmode, arguments);
1536 set_mem_align (arguments, PARM_BOUNDARY);
1538 /* Walk past the arg-pointer and structure value address. */
1539 size = GET_MODE_SIZE (Pmode);
1540 if (struct_value)
1541 size += GET_MODE_SIZE (Pmode);
1543 /* Restore each of the registers previously saved. Make USE insns
1544 for each of these registers for use in making the call. */
1545 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1546 if ((mode = apply_args_mode[regno]) != VOIDmode)
1548 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1549 if (size % align != 0)
1550 size = CEIL (size, align) * align;
1551 reg = gen_rtx_REG (mode, regno);
1552 emit_move_insn (reg, adjust_address (arguments, mode, size));
1553 use_reg (&call_fusage, reg);
1554 size += GET_MODE_SIZE (mode);
1557 /* Restore the structure value address unless this is passed as an
1558 "invisible" first argument. */
1559 size = GET_MODE_SIZE (Pmode);
1560 if (struct_value)
1562 rtx value = gen_reg_rtx (Pmode);
1563 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1564 emit_move_insn (struct_value, value);
1565 if (REG_P (struct_value))
1566 use_reg (&call_fusage, struct_value);
1567 size += GET_MODE_SIZE (Pmode);
1570 /* All arguments and registers used for the call are set up by now! */
1571 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1573 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1574 and we don't want to load it into a register as an optimization,
1575 because prepare_call_address already did it if it should be done. */
1576 if (GET_CODE (function) != SYMBOL_REF)
1577 function = memory_address (FUNCTION_MODE, function);
1579 /* Generate the actual call instruction and save the return value. */
1580 #ifdef HAVE_untyped_call
1581 if (HAVE_untyped_call)
1582 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1583 result, result_vector (1, result)));
1584 else
1585 #endif
1586 #ifdef HAVE_call_value
1587 if (HAVE_call_value)
1589 rtx valreg = 0;
1591 /* Locate the unique return register. It is not possible to
1592 express a call that sets more than one return register using
1593 call_value; use untyped_call for that. In fact, untyped_call
1594 only needs to save the return registers in the given block. */
1595 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1596 if ((mode = apply_result_mode[regno]) != VOIDmode)
1598 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1600 valreg = gen_rtx_REG (mode, regno);
1603 emit_call_insn (GEN_CALL_VALUE (valreg,
1604 gen_rtx_MEM (FUNCTION_MODE, function),
1605 const0_rtx, NULL_RTX, const0_rtx));
1607 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1609 else
1610 #endif
1611 gcc_unreachable ();
1613 /* Find the CALL insn we just emitted, and attach the register usage
1614 information. */
1615 call_insn = last_call_insn ();
1616 add_function_usage_to (call_insn, call_fusage);
1618 /* Restore the stack. */
1619 #ifdef HAVE_save_stack_nonlocal
1620 if (HAVE_save_stack_nonlocal)
1621 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1622 else
1623 #endif
1624 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1626 OK_DEFER_POP;
1628 /* Return the address of the result block. */
1629 result = copy_addr_to_reg (XEXP (result, 0));
1630 return convert_memory_address (ptr_mode, result);
1633 /* Perform an untyped return. */
1635 static void
1636 expand_builtin_return (rtx result)
1638 int size, align, regno;
1639 enum machine_mode mode;
1640 rtx reg;
1641 rtx call_fusage = 0;
1643 result = convert_memory_address (Pmode, result);
1645 apply_result_size ();
1646 result = gen_rtx_MEM (BLKmode, result);
1648 #ifdef HAVE_untyped_return
1649 if (HAVE_untyped_return)
1651 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1652 emit_barrier ();
1653 return;
1655 #endif
1657 /* Restore the return value and note that each value is used. */
1658 size = 0;
1659 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1660 if ((mode = apply_result_mode[regno]) != VOIDmode)
1662 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1663 if (size % align != 0)
1664 size = CEIL (size, align) * align;
1665 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1666 emit_move_insn (reg, adjust_address (result, mode, size));
1668 push_to_sequence (call_fusage);
1669 emit_use (reg);
1670 call_fusage = get_insns ();
1671 end_sequence ();
1672 size += GET_MODE_SIZE (mode);
1675 /* Put the USE insns before the return. */
1676 emit_insn (call_fusage);
1678 /* Return whatever values was restored by jumping directly to the end
1679 of the function. */
1680 expand_naked_return ();
1683 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1685 static enum type_class
1686 type_to_class (tree type)
1688 switch (TREE_CODE (type))
1690 case VOID_TYPE: return void_type_class;
1691 case INTEGER_TYPE: return integer_type_class;
1692 case ENUMERAL_TYPE: return enumeral_type_class;
1693 case BOOLEAN_TYPE: return boolean_type_class;
1694 case POINTER_TYPE: return pointer_type_class;
1695 case REFERENCE_TYPE: return reference_type_class;
1696 case OFFSET_TYPE: return offset_type_class;
1697 case REAL_TYPE: return real_type_class;
1698 case COMPLEX_TYPE: return complex_type_class;
1699 case FUNCTION_TYPE: return function_type_class;
1700 case METHOD_TYPE: return method_type_class;
1701 case RECORD_TYPE: return record_type_class;
1702 case UNION_TYPE:
1703 case QUAL_UNION_TYPE: return union_type_class;
1704 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1705 ? string_type_class : array_type_class);
1706 case LANG_TYPE: return lang_type_class;
1707 default: return no_type_class;
1711 /* Expand a call EXP to __builtin_classify_type. */
1713 static rtx
1714 expand_builtin_classify_type (tree exp)
1716 if (call_expr_nargs (exp))
1717 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1718 return GEN_INT (no_type_class);
1721 /* This helper macro, meant to be used in mathfn_built_in below,
1722 determines which among a set of three builtin math functions is
1723 appropriate for a given type mode. The `F' and `L' cases are
1724 automatically generated from the `double' case. */
1725 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1726 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1727 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1728 fcodel = BUILT_IN_MATHFN##L ; break;
1729 /* Similar to above, but appends _R after any F/L suffix. */
1730 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1731 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1732 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1733 fcodel = BUILT_IN_MATHFN##L_R ; break;
1735 /* Return mathematic function equivalent to FN but operating directly
1736 on TYPE, if available. If IMPLICIT is true find the function in
1737 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1738 can't do the conversion, return zero. */
1740 static tree
1741 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1743 tree const *const fn_arr
1744 = implicit ? implicit_built_in_decls : built_in_decls;
1745 enum built_in_function fcode, fcodef, fcodel;
1747 switch (fn)
1749 CASE_MATHFN (BUILT_IN_ACOS)
1750 CASE_MATHFN (BUILT_IN_ACOSH)
1751 CASE_MATHFN (BUILT_IN_ASIN)
1752 CASE_MATHFN (BUILT_IN_ASINH)
1753 CASE_MATHFN (BUILT_IN_ATAN)
1754 CASE_MATHFN (BUILT_IN_ATAN2)
1755 CASE_MATHFN (BUILT_IN_ATANH)
1756 CASE_MATHFN (BUILT_IN_CBRT)
1757 CASE_MATHFN (BUILT_IN_CEIL)
1758 CASE_MATHFN (BUILT_IN_CEXPI)
1759 CASE_MATHFN (BUILT_IN_COPYSIGN)
1760 CASE_MATHFN (BUILT_IN_COS)
1761 CASE_MATHFN (BUILT_IN_COSH)
1762 CASE_MATHFN (BUILT_IN_DREM)
1763 CASE_MATHFN (BUILT_IN_ERF)
1764 CASE_MATHFN (BUILT_IN_ERFC)
1765 CASE_MATHFN (BUILT_IN_EXP)
1766 CASE_MATHFN (BUILT_IN_EXP10)
1767 CASE_MATHFN (BUILT_IN_EXP2)
1768 CASE_MATHFN (BUILT_IN_EXPM1)
1769 CASE_MATHFN (BUILT_IN_FABS)
1770 CASE_MATHFN (BUILT_IN_FDIM)
1771 CASE_MATHFN (BUILT_IN_FLOOR)
1772 CASE_MATHFN (BUILT_IN_FMA)
1773 CASE_MATHFN (BUILT_IN_FMAX)
1774 CASE_MATHFN (BUILT_IN_FMIN)
1775 CASE_MATHFN (BUILT_IN_FMOD)
1776 CASE_MATHFN (BUILT_IN_FREXP)
1777 CASE_MATHFN (BUILT_IN_GAMMA)
1778 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1779 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1780 CASE_MATHFN (BUILT_IN_HYPOT)
1781 CASE_MATHFN (BUILT_IN_ILOGB)
1782 CASE_MATHFN (BUILT_IN_INF)
1783 CASE_MATHFN (BUILT_IN_ISINF)
1784 CASE_MATHFN (BUILT_IN_J0)
1785 CASE_MATHFN (BUILT_IN_J1)
1786 CASE_MATHFN (BUILT_IN_JN)
1787 CASE_MATHFN (BUILT_IN_LCEIL)
1788 CASE_MATHFN (BUILT_IN_LDEXP)
1789 CASE_MATHFN (BUILT_IN_LFLOOR)
1790 CASE_MATHFN (BUILT_IN_LGAMMA)
1791 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1792 CASE_MATHFN (BUILT_IN_LLCEIL)
1793 CASE_MATHFN (BUILT_IN_LLFLOOR)
1794 CASE_MATHFN (BUILT_IN_LLRINT)
1795 CASE_MATHFN (BUILT_IN_LLROUND)
1796 CASE_MATHFN (BUILT_IN_LOG)
1797 CASE_MATHFN (BUILT_IN_LOG10)
1798 CASE_MATHFN (BUILT_IN_LOG1P)
1799 CASE_MATHFN (BUILT_IN_LOG2)
1800 CASE_MATHFN (BUILT_IN_LOGB)
1801 CASE_MATHFN (BUILT_IN_LRINT)
1802 CASE_MATHFN (BUILT_IN_LROUND)
1803 CASE_MATHFN (BUILT_IN_MODF)
1804 CASE_MATHFN (BUILT_IN_NAN)
1805 CASE_MATHFN (BUILT_IN_NANS)
1806 CASE_MATHFN (BUILT_IN_NEARBYINT)
1807 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1808 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1809 CASE_MATHFN (BUILT_IN_POW)
1810 CASE_MATHFN (BUILT_IN_POWI)
1811 CASE_MATHFN (BUILT_IN_POW10)
1812 CASE_MATHFN (BUILT_IN_REMAINDER)
1813 CASE_MATHFN (BUILT_IN_REMQUO)
1814 CASE_MATHFN (BUILT_IN_RINT)
1815 CASE_MATHFN (BUILT_IN_ROUND)
1816 CASE_MATHFN (BUILT_IN_SCALB)
1817 CASE_MATHFN (BUILT_IN_SCALBLN)
1818 CASE_MATHFN (BUILT_IN_SCALBN)
1819 CASE_MATHFN (BUILT_IN_SIGNBIT)
1820 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1821 CASE_MATHFN (BUILT_IN_SIN)
1822 CASE_MATHFN (BUILT_IN_SINCOS)
1823 CASE_MATHFN (BUILT_IN_SINH)
1824 CASE_MATHFN (BUILT_IN_SQRT)
1825 CASE_MATHFN (BUILT_IN_TAN)
1826 CASE_MATHFN (BUILT_IN_TANH)
1827 CASE_MATHFN (BUILT_IN_TGAMMA)
1828 CASE_MATHFN (BUILT_IN_TRUNC)
1829 CASE_MATHFN (BUILT_IN_Y0)
1830 CASE_MATHFN (BUILT_IN_Y1)
1831 CASE_MATHFN (BUILT_IN_YN)
1833 default:
1834 return NULL_TREE;
1837 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1838 return fn_arr[fcode];
1839 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1840 return fn_arr[fcodef];
1841 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1842 return fn_arr[fcodel];
1843 else
1844 return NULL_TREE;
1847 /* Like mathfn_built_in_1(), but always use the implicit array. */
1849 tree
1850 mathfn_built_in (tree type, enum built_in_function fn)
1852 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1855 /* If errno must be maintained, expand the RTL to check if the result,
1856 TARGET, of a built-in function call, EXP, is NaN, and if so set
1857 errno to EDOM. */
1859 static void
1860 expand_errno_check (tree exp, rtx target)
1862 rtx lab = gen_label_rtx ();
1864 /* Test the result; if it is NaN, set errno=EDOM because
1865 the argument was not in the domain. */
1866 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1867 0, lab);
1869 #ifdef TARGET_EDOM
1870 /* If this built-in doesn't throw an exception, set errno directly. */
1871 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1873 #ifdef GEN_ERRNO_RTX
1874 rtx errno_rtx = GEN_ERRNO_RTX;
1875 #else
1876 rtx errno_rtx
1877 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1878 #endif
1879 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1880 emit_label (lab);
1881 return;
1883 #endif
1885 /* Make sure the library call isn't expanded as a tail call. */
1886 CALL_EXPR_TAILCALL (exp) = 0;
1888 /* We can't set errno=EDOM directly; let the library call do it.
1889 Pop the arguments right away in case the call gets deleted. */
1890 NO_DEFER_POP;
1891 expand_call (exp, target, 0);
1892 OK_DEFER_POP;
1893 emit_label (lab);
1896 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1897 Return NULL_RTX if a normal call should be emitted rather than expanding
1898 the function in-line. EXP is the expression that is a call to the builtin
1899 function; if convenient, the result should be placed in TARGET.
1900 SUBTARGET may be used as the target for computing one of EXP's operands. */
1902 static rtx
1903 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1905 optab builtin_optab;
1906 rtx op0, insns, before_call;
1907 tree fndecl = get_callee_fndecl (exp);
1908 enum machine_mode mode;
1909 bool errno_set = false;
1910 tree arg;
1912 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1913 return NULL_RTX;
1915 arg = CALL_EXPR_ARG (exp, 0);
1917 switch (DECL_FUNCTION_CODE (fndecl))
1919 CASE_FLT_FN (BUILT_IN_SQRT):
1920 errno_set = ! tree_expr_nonnegative_p (arg);
1921 builtin_optab = sqrt_optab;
1922 break;
1923 CASE_FLT_FN (BUILT_IN_EXP):
1924 errno_set = true; builtin_optab = exp_optab; break;
1925 CASE_FLT_FN (BUILT_IN_EXP10):
1926 CASE_FLT_FN (BUILT_IN_POW10):
1927 errno_set = true; builtin_optab = exp10_optab; break;
1928 CASE_FLT_FN (BUILT_IN_EXP2):
1929 errno_set = true; builtin_optab = exp2_optab; break;
1930 CASE_FLT_FN (BUILT_IN_EXPM1):
1931 errno_set = true; builtin_optab = expm1_optab; break;
1932 CASE_FLT_FN (BUILT_IN_LOGB):
1933 errno_set = true; builtin_optab = logb_optab; break;
1934 CASE_FLT_FN (BUILT_IN_LOG):
1935 errno_set = true; builtin_optab = log_optab; break;
1936 CASE_FLT_FN (BUILT_IN_LOG10):
1937 errno_set = true; builtin_optab = log10_optab; break;
1938 CASE_FLT_FN (BUILT_IN_LOG2):
1939 errno_set = true; builtin_optab = log2_optab; break;
1940 CASE_FLT_FN (BUILT_IN_LOG1P):
1941 errno_set = true; builtin_optab = log1p_optab; break;
1942 CASE_FLT_FN (BUILT_IN_ASIN):
1943 builtin_optab = asin_optab; break;
1944 CASE_FLT_FN (BUILT_IN_ACOS):
1945 builtin_optab = acos_optab; break;
1946 CASE_FLT_FN (BUILT_IN_TAN):
1947 builtin_optab = tan_optab; break;
1948 CASE_FLT_FN (BUILT_IN_ATAN):
1949 builtin_optab = atan_optab; break;
1950 CASE_FLT_FN (BUILT_IN_FLOOR):
1951 builtin_optab = floor_optab; break;
1952 CASE_FLT_FN (BUILT_IN_CEIL):
1953 builtin_optab = ceil_optab; break;
1954 CASE_FLT_FN (BUILT_IN_TRUNC):
1955 builtin_optab = btrunc_optab; break;
1956 CASE_FLT_FN (BUILT_IN_ROUND):
1957 builtin_optab = round_optab; break;
1958 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1959 builtin_optab = nearbyint_optab;
1960 if (flag_trapping_math)
1961 break;
1962 /* Else fallthrough and expand as rint. */
1963 CASE_FLT_FN (BUILT_IN_RINT):
1964 builtin_optab = rint_optab; break;
1965 default:
1966 gcc_unreachable ();
1969 /* Make a suitable register to place result in. */
1970 mode = TYPE_MODE (TREE_TYPE (exp));
1972 if (! flag_errno_math || ! HONOR_NANS (mode))
1973 errno_set = false;
1975 /* Before working hard, check whether the instruction is available. */
1976 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1978 target = gen_reg_rtx (mode);
1980 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1981 need to expand the argument again. This way, we will not perform
1982 side-effects more the once. */
1983 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1985 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1987 start_sequence ();
1989 /* Compute into TARGET.
1990 Set TARGET to wherever the result comes back. */
1991 target = expand_unop (mode, builtin_optab, op0, target, 0);
1993 if (target != 0)
1995 if (errno_set)
1996 expand_errno_check (exp, target);
1998 /* Output the entire sequence. */
1999 insns = get_insns ();
2000 end_sequence ();
2001 emit_insn (insns);
2002 return target;
2005 /* If we were unable to expand via the builtin, stop the sequence
2006 (without outputting the insns) and call to the library function
2007 with the stabilized argument list. */
2008 end_sequence ();
2011 before_call = get_last_insn ();
2013 return expand_call (exp, target, target == const0_rtx);
2016 /* Expand a call to the builtin binary math functions (pow and atan2).
2017 Return NULL_RTX if a normal call should be emitted rather than expanding the
2018 function in-line. EXP is the expression that is a call to the builtin
2019 function; if convenient, the result should be placed in TARGET.
2020 SUBTARGET may be used as the target for computing one of EXP's
2021 operands. */
2023 static rtx
2024 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2026 optab builtin_optab;
2027 rtx op0, op1, insns;
2028 int op1_type = REAL_TYPE;
2029 tree fndecl = get_callee_fndecl (exp);
2030 tree arg0, arg1;
2031 enum machine_mode mode;
2032 bool errno_set = true;
2034 switch (DECL_FUNCTION_CODE (fndecl))
2036 CASE_FLT_FN (BUILT_IN_SCALBN):
2037 CASE_FLT_FN (BUILT_IN_SCALBLN):
2038 CASE_FLT_FN (BUILT_IN_LDEXP):
2039 op1_type = INTEGER_TYPE;
2040 default:
2041 break;
2044 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2045 return NULL_RTX;
2047 arg0 = CALL_EXPR_ARG (exp, 0);
2048 arg1 = CALL_EXPR_ARG (exp, 1);
2050 switch (DECL_FUNCTION_CODE (fndecl))
2052 CASE_FLT_FN (BUILT_IN_POW):
2053 builtin_optab = pow_optab; break;
2054 CASE_FLT_FN (BUILT_IN_ATAN2):
2055 builtin_optab = atan2_optab; break;
2056 CASE_FLT_FN (BUILT_IN_SCALB):
2057 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2058 return 0;
2059 builtin_optab = scalb_optab; break;
2060 CASE_FLT_FN (BUILT_IN_SCALBN):
2061 CASE_FLT_FN (BUILT_IN_SCALBLN):
2062 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2063 return 0;
2064 /* Fall through... */
2065 CASE_FLT_FN (BUILT_IN_LDEXP):
2066 builtin_optab = ldexp_optab; break;
2067 CASE_FLT_FN (BUILT_IN_FMOD):
2068 builtin_optab = fmod_optab; break;
2069 CASE_FLT_FN (BUILT_IN_REMAINDER):
2070 CASE_FLT_FN (BUILT_IN_DREM):
2071 builtin_optab = remainder_optab; break;
2072 default:
2073 gcc_unreachable ();
2076 /* Make a suitable register to place result in. */
2077 mode = TYPE_MODE (TREE_TYPE (exp));
2079 /* Before working hard, check whether the instruction is available. */
2080 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2081 return NULL_RTX;
2083 target = gen_reg_rtx (mode);
2085 if (! flag_errno_math || ! HONOR_NANS (mode))
2086 errno_set = false;
2088 /* Always stabilize the argument list. */
2089 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2090 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2092 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2093 op1 = expand_normal (arg1);
2095 start_sequence ();
2097 /* Compute into TARGET.
2098 Set TARGET to wherever the result comes back. */
2099 target = expand_binop (mode, builtin_optab, op0, op1,
2100 target, 0, OPTAB_DIRECT);
2102 /* If we were unable to expand via the builtin, stop the sequence
2103 (without outputting the insns) and call to the library function
2104 with the stabilized argument list. */
2105 if (target == 0)
2107 end_sequence ();
2108 return expand_call (exp, target, target == const0_rtx);
2111 if (errno_set)
2112 expand_errno_check (exp, target);
2114 /* Output the entire sequence. */
2115 insns = get_insns ();
2116 end_sequence ();
2117 emit_insn (insns);
2119 return target;
2122 /* Expand a call to the builtin sin and cos math functions.
2123 Return NULL_RTX if a normal call should be emitted rather than expanding the
2124 function in-line. EXP is the expression that is a call to the builtin
2125 function; if convenient, the result should be placed in TARGET.
2126 SUBTARGET may be used as the target for computing one of EXP's
2127 operands. */
2129 static rtx
2130 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2132 optab builtin_optab;
2133 rtx op0, insns;
2134 tree fndecl = get_callee_fndecl (exp);
2135 enum machine_mode mode;
2136 tree arg;
2138 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2139 return NULL_RTX;
2141 arg = CALL_EXPR_ARG (exp, 0);
2143 switch (DECL_FUNCTION_CODE (fndecl))
2145 CASE_FLT_FN (BUILT_IN_SIN):
2146 CASE_FLT_FN (BUILT_IN_COS):
2147 builtin_optab = sincos_optab; break;
2148 default:
2149 gcc_unreachable ();
2152 /* Make a suitable register to place result in. */
2153 mode = TYPE_MODE (TREE_TYPE (exp));
2155 /* Check if sincos insn is available, otherwise fallback
2156 to sin or cos insn. */
2157 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2158 switch (DECL_FUNCTION_CODE (fndecl))
2160 CASE_FLT_FN (BUILT_IN_SIN):
2161 builtin_optab = sin_optab; break;
2162 CASE_FLT_FN (BUILT_IN_COS):
2163 builtin_optab = cos_optab; break;
2164 default:
2165 gcc_unreachable ();
2168 /* Before working hard, check whether the instruction is available. */
2169 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2171 target = gen_reg_rtx (mode);
2173 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2174 need to expand the argument again. This way, we will not perform
2175 side-effects more the once. */
2176 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2178 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2180 start_sequence ();
2182 /* Compute into TARGET.
2183 Set TARGET to wherever the result comes back. */
2184 if (builtin_optab == sincos_optab)
2186 int result;
2188 switch (DECL_FUNCTION_CODE (fndecl))
2190 CASE_FLT_FN (BUILT_IN_SIN):
2191 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2192 break;
2193 CASE_FLT_FN (BUILT_IN_COS):
2194 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2195 break;
2196 default:
2197 gcc_unreachable ();
2199 gcc_assert (result);
2201 else
2203 target = expand_unop (mode, builtin_optab, op0, target, 0);
2206 if (target != 0)
2208 /* Output the entire sequence. */
2209 insns = get_insns ();
2210 end_sequence ();
2211 emit_insn (insns);
2212 return target;
2215 /* If we were unable to expand via the builtin, stop the sequence
2216 (without outputting the insns) and call to the library function
2217 with the stabilized argument list. */
2218 end_sequence ();
2221 target = expand_call (exp, target, target == const0_rtx);
2223 return target;
2226 /* Expand a call to one of the builtin math functions that operate on
2227 floating point argument and output an integer result (ilogb, isinf,
2228 isnan, etc).
2229 Return 0 if a normal call should be emitted rather than expanding the
2230 function in-line. EXP is the expression that is a call to the builtin
2231 function; if convenient, the result should be placed in TARGET.
2232 SUBTARGET may be used as the target for computing one of EXP's operands. */
2234 static rtx
2235 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2237 optab builtin_optab = 0;
2238 enum insn_code icode = CODE_FOR_nothing;
2239 rtx op0;
2240 tree fndecl = get_callee_fndecl (exp);
2241 enum machine_mode mode;
2242 bool errno_set = false;
2243 tree arg;
2245 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2246 return NULL_RTX;
2248 arg = CALL_EXPR_ARG (exp, 0);
2250 switch (DECL_FUNCTION_CODE (fndecl))
2252 CASE_FLT_FN (BUILT_IN_ILOGB):
2253 errno_set = true; builtin_optab = ilogb_optab; break;
2254 CASE_FLT_FN (BUILT_IN_ISINF):
2255 builtin_optab = isinf_optab; break;
2256 case BUILT_IN_ISNORMAL:
2257 case BUILT_IN_ISFINITE:
2258 CASE_FLT_FN (BUILT_IN_FINITE):
2259 /* These builtins have no optabs (yet). */
2260 break;
2261 default:
2262 gcc_unreachable ();
2265 /* There's no easy way to detect the case we need to set EDOM. */
2266 if (flag_errno_math && errno_set)
2267 return NULL_RTX;
2269 /* Optab mode depends on the mode of the input argument. */
2270 mode = TYPE_MODE (TREE_TYPE (arg));
2272 if (builtin_optab)
2273 icode = optab_handler (builtin_optab, mode)->insn_code;
2275 /* Before working hard, check whether the instruction is available. */
2276 if (icode != CODE_FOR_nothing)
2278 /* Make a suitable register to place result in. */
2279 if (!target
2280 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2281 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2283 gcc_assert (insn_data[icode].operand[0].predicate
2284 (target, GET_MODE (target)));
2286 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2287 need to expand the argument again. This way, we will not perform
2288 side-effects more the once. */
2289 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2291 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2293 if (mode != GET_MODE (op0))
2294 op0 = convert_to_mode (mode, op0, 0);
2296 /* Compute into TARGET.
2297 Set TARGET to wherever the result comes back. */
2298 emit_unop_insn (icode, target, op0, UNKNOWN);
2299 return target;
2302 /* If there is no optab, try generic code. */
2303 switch (DECL_FUNCTION_CODE (fndecl))
2305 tree result;
2307 CASE_FLT_FN (BUILT_IN_ISINF):
2309 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2310 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2311 tree const type = TREE_TYPE (arg);
2312 REAL_VALUE_TYPE r;
2313 char buf[128];
2315 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2316 real_from_string (&r, buf);
2317 result = build_call_expr (isgr_fn, 2,
2318 fold_build1 (ABS_EXPR, type, arg),
2319 build_real (type, r));
2320 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2322 CASE_FLT_FN (BUILT_IN_FINITE):
2323 case BUILT_IN_ISFINITE:
2325 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2326 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2327 tree const type = TREE_TYPE (arg);
2328 REAL_VALUE_TYPE r;
2329 char buf[128];
2331 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2332 real_from_string (&r, buf);
2333 result = build_call_expr (isle_fn, 2,
2334 fold_build1 (ABS_EXPR, type, arg),
2335 build_real (type, r));
2336 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2338 case BUILT_IN_ISNORMAL:
2340 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2341 islessequal(fabs(x),DBL_MAX). */
2342 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2343 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2344 tree const type = TREE_TYPE (arg);
2345 REAL_VALUE_TYPE rmax, rmin;
2346 char buf[128];
2348 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2349 real_from_string (&rmax, buf);
2350 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2351 real_from_string (&rmin, buf);
2352 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2353 result = build_call_expr (isle_fn, 2, arg,
2354 build_real (type, rmax));
2355 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2356 build_call_expr (isge_fn, 2, arg,
2357 build_real (type, rmin)));
2358 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2360 default:
2361 break;
2364 target = expand_call (exp, target, target == const0_rtx);
2366 return target;
2369 /* Expand a call to the builtin sincos math function.
2370 Return NULL_RTX if a normal call should be emitted rather than expanding the
2371 function in-line. EXP is the expression that is a call to the builtin
2372 function. */
2374 static rtx
2375 expand_builtin_sincos (tree exp)
2377 rtx op0, op1, op2, target1, target2;
2378 enum machine_mode mode;
2379 tree arg, sinp, cosp;
2380 int result;
2382 if (!validate_arglist (exp, REAL_TYPE,
2383 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2384 return NULL_RTX;
2386 arg = CALL_EXPR_ARG (exp, 0);
2387 sinp = CALL_EXPR_ARG (exp, 1);
2388 cosp = CALL_EXPR_ARG (exp, 2);
2390 /* Make a suitable register to place result in. */
2391 mode = TYPE_MODE (TREE_TYPE (arg));
2393 /* Check if sincos insn is available, otherwise emit the call. */
2394 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2395 return NULL_RTX;
2397 target1 = gen_reg_rtx (mode);
2398 target2 = gen_reg_rtx (mode);
2400 op0 = expand_normal (arg);
2401 op1 = expand_normal (build_fold_indirect_ref (sinp));
2402 op2 = expand_normal (build_fold_indirect_ref (cosp));
2404 /* Compute into target1 and target2.
2405 Set TARGET to wherever the result comes back. */
2406 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2407 gcc_assert (result);
2409 /* Move target1 and target2 to the memory locations indicated
2410 by op1 and op2. */
2411 emit_move_insn (op1, target1);
2412 emit_move_insn (op2, target2);
2414 return const0_rtx;
2417 /* Expand a call to the internal cexpi builtin to the sincos math function.
2418 EXP is the expression that is a call to the builtin function; if convenient,
2419 the result should be placed in TARGET. SUBTARGET may be used as the target
2420 for computing one of EXP's operands. */
2422 static rtx
2423 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2425 tree fndecl = get_callee_fndecl (exp);
2426 tree arg, type;
2427 enum machine_mode mode;
2428 rtx op0, op1, op2;
2430 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2431 return NULL_RTX;
2433 arg = CALL_EXPR_ARG (exp, 0);
2434 type = TREE_TYPE (arg);
2435 mode = TYPE_MODE (TREE_TYPE (arg));
2437 /* Try expanding via a sincos optab, fall back to emitting a libcall
2438 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2439 is only generated from sincos, cexp or if we have either of them. */
2440 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2442 op1 = gen_reg_rtx (mode);
2443 op2 = gen_reg_rtx (mode);
2445 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2447 /* Compute into op1 and op2. */
2448 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2450 else if (TARGET_HAS_SINCOS)
2452 tree call, fn = NULL_TREE;
2453 tree top1, top2;
2454 rtx op1a, op2a;
2456 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2457 fn = built_in_decls[BUILT_IN_SINCOSF];
2458 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2459 fn = built_in_decls[BUILT_IN_SINCOS];
2460 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2461 fn = built_in_decls[BUILT_IN_SINCOSL];
2462 else
2463 gcc_unreachable ();
2465 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2466 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2467 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2468 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2469 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2470 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2472 /* Make sure not to fold the sincos call again. */
2473 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2474 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2475 call, 3, arg, top1, top2));
2477 else
2479 tree call, fn = NULL_TREE, narg;
2480 tree ctype = build_complex_type (type);
2482 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2483 fn = built_in_decls[BUILT_IN_CEXPF];
2484 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2485 fn = built_in_decls[BUILT_IN_CEXP];
2486 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2487 fn = built_in_decls[BUILT_IN_CEXPL];
2488 else
2489 gcc_unreachable ();
2491 /* If we don't have a decl for cexp create one. This is the
2492 friendliest fallback if the user calls __builtin_cexpi
2493 without full target C99 function support. */
2494 if (fn == NULL_TREE)
2496 tree fntype;
2497 const char *name = NULL;
2499 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2500 name = "cexpf";
2501 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2502 name = "cexp";
2503 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2504 name = "cexpl";
2506 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2507 fn = build_fn_decl (name, fntype);
2510 narg = fold_build2 (COMPLEX_EXPR, ctype,
2511 build_real (type, dconst0), arg);
2513 /* Make sure not to fold the cexp call again. */
2514 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2515 return expand_expr (build_call_nary (ctype, call, 1, narg),
2516 target, VOIDmode, EXPAND_NORMAL);
2519 /* Now build the proper return type. */
2520 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2521 make_tree (TREE_TYPE (arg), op2),
2522 make_tree (TREE_TYPE (arg), op1)),
2523 target, VOIDmode, EXPAND_NORMAL);
2526 /* Expand a call to one of the builtin rounding functions gcc defines
2527 as an extension (lfloor and lceil). As these are gcc extensions we
2528 do not need to worry about setting errno to EDOM.
2529 If expanding via optab fails, lower expression to (int)(floor(x)).
2530 EXP is the expression that is a call to the builtin function;
2531 if convenient, the result should be placed in TARGET. */
2533 static rtx
2534 expand_builtin_int_roundingfn (tree exp, rtx target)
2536 convert_optab builtin_optab;
2537 rtx op0, insns, tmp;
2538 tree fndecl = get_callee_fndecl (exp);
2539 enum built_in_function fallback_fn;
2540 tree fallback_fndecl;
2541 enum machine_mode mode;
2542 tree arg;
2544 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2545 gcc_unreachable ();
2547 arg = CALL_EXPR_ARG (exp, 0);
2549 switch (DECL_FUNCTION_CODE (fndecl))
2551 CASE_FLT_FN (BUILT_IN_LCEIL):
2552 CASE_FLT_FN (BUILT_IN_LLCEIL):
2553 builtin_optab = lceil_optab;
2554 fallback_fn = BUILT_IN_CEIL;
2555 break;
2557 CASE_FLT_FN (BUILT_IN_LFLOOR):
2558 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2559 builtin_optab = lfloor_optab;
2560 fallback_fn = BUILT_IN_FLOOR;
2561 break;
2563 default:
2564 gcc_unreachable ();
2567 /* Make a suitable register to place result in. */
2568 mode = TYPE_MODE (TREE_TYPE (exp));
2570 target = gen_reg_rtx (mode);
2572 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2573 need to expand the argument again. This way, we will not perform
2574 side-effects more the once. */
2575 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2577 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2579 start_sequence ();
2581 /* Compute into TARGET. */
2582 if (expand_sfix_optab (target, op0, builtin_optab))
2584 /* Output the entire sequence. */
2585 insns = get_insns ();
2586 end_sequence ();
2587 emit_insn (insns);
2588 return target;
2591 /* If we were unable to expand via the builtin, stop the sequence
2592 (without outputting the insns). */
2593 end_sequence ();
2595 /* Fall back to floating point rounding optab. */
2596 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2598 /* For non-C99 targets we may end up without a fallback fndecl here
2599 if the user called __builtin_lfloor directly. In this case emit
2600 a call to the floor/ceil variants nevertheless. This should result
2601 in the best user experience for not full C99 targets. */
2602 if (fallback_fndecl == NULL_TREE)
2604 tree fntype;
2605 const char *name = NULL;
2607 switch (DECL_FUNCTION_CODE (fndecl))
2609 case BUILT_IN_LCEIL:
2610 case BUILT_IN_LLCEIL:
2611 name = "ceil";
2612 break;
2613 case BUILT_IN_LCEILF:
2614 case BUILT_IN_LLCEILF:
2615 name = "ceilf";
2616 break;
2617 case BUILT_IN_LCEILL:
2618 case BUILT_IN_LLCEILL:
2619 name = "ceill";
2620 break;
2621 case BUILT_IN_LFLOOR:
2622 case BUILT_IN_LLFLOOR:
2623 name = "floor";
2624 break;
2625 case BUILT_IN_LFLOORF:
2626 case BUILT_IN_LLFLOORF:
2627 name = "floorf";
2628 break;
2629 case BUILT_IN_LFLOORL:
2630 case BUILT_IN_LLFLOORL:
2631 name = "floorl";
2632 break;
2633 default:
2634 gcc_unreachable ();
2637 fntype = build_function_type_list (TREE_TYPE (arg),
2638 TREE_TYPE (arg), NULL_TREE);
2639 fallback_fndecl = build_fn_decl (name, fntype);
2642 exp = build_call_expr (fallback_fndecl, 1, arg);
2644 tmp = expand_normal (exp);
2646 /* Truncate the result of floating point optab to integer
2647 via expand_fix (). */
2648 target = gen_reg_rtx (mode);
2649 expand_fix (target, tmp, 0);
2651 return target;
2654 /* Expand a call to one of the builtin math functions doing integer
2655 conversion (lrint).
2656 Return 0 if a normal call should be emitted rather than expanding the
2657 function in-line. EXP is the expression that is a call to the builtin
2658 function; if convenient, the result should be placed in TARGET. */
2660 static rtx
2661 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2663 convert_optab builtin_optab;
2664 rtx op0, insns;
2665 tree fndecl = get_callee_fndecl (exp);
2666 tree arg;
2667 enum machine_mode mode;
2669 /* There's no easy way to detect the case we need to set EDOM. */
2670 if (flag_errno_math)
2671 return NULL_RTX;
2673 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2674 gcc_unreachable ();
2676 arg = CALL_EXPR_ARG (exp, 0);
2678 switch (DECL_FUNCTION_CODE (fndecl))
2680 CASE_FLT_FN (BUILT_IN_LRINT):
2681 CASE_FLT_FN (BUILT_IN_LLRINT):
2682 builtin_optab = lrint_optab; break;
2683 CASE_FLT_FN (BUILT_IN_LROUND):
2684 CASE_FLT_FN (BUILT_IN_LLROUND):
2685 builtin_optab = lround_optab; break;
2686 default:
2687 gcc_unreachable ();
2690 /* Make a suitable register to place result in. */
2691 mode = TYPE_MODE (TREE_TYPE (exp));
2693 target = gen_reg_rtx (mode);
2695 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2696 need to expand the argument again. This way, we will not perform
2697 side-effects more the once. */
2698 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2700 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2702 start_sequence ();
2704 if (expand_sfix_optab (target, op0, builtin_optab))
2706 /* Output the entire sequence. */
2707 insns = get_insns ();
2708 end_sequence ();
2709 emit_insn (insns);
2710 return target;
2713 /* If we were unable to expand via the builtin, stop the sequence
2714 (without outputting the insns) and call to the library function
2715 with the stabilized argument list. */
2716 end_sequence ();
2718 target = expand_call (exp, target, target == const0_rtx);
2720 return target;
2723 /* To evaluate powi(x,n), the floating point value x raised to the
2724 constant integer exponent n, we use a hybrid algorithm that
2725 combines the "window method" with look-up tables. For an
2726 introduction to exponentiation algorithms and "addition chains",
2727 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2728 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2729 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2730 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2732 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2733 multiplications to inline before calling the system library's pow
2734 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2735 so this default never requires calling pow, powf or powl. */
2737 #ifndef POWI_MAX_MULTS
2738 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2739 #endif
2741 /* The size of the "optimal power tree" lookup table. All
2742 exponents less than this value are simply looked up in the
2743 powi_table below. This threshold is also used to size the
2744 cache of pseudo registers that hold intermediate results. */
2745 #define POWI_TABLE_SIZE 256
2747 /* The size, in bits of the window, used in the "window method"
2748 exponentiation algorithm. This is equivalent to a radix of
2749 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2750 #define POWI_WINDOW_SIZE 3
2752 /* The following table is an efficient representation of an
2753 "optimal power tree". For each value, i, the corresponding
2754 value, j, in the table states than an optimal evaluation
2755 sequence for calculating pow(x,i) can be found by evaluating
2756 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2757 100 integers is given in Knuth's "Seminumerical algorithms". */
2759 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2761 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2762 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2763 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2764 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2765 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2766 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2767 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2768 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2769 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2770 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2771 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2772 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2773 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2774 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2775 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2776 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2777 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2778 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2779 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2780 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2781 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2782 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2783 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2784 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2785 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2786 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2787 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2788 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2789 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2790 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2791 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2792 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2796 /* Return the number of multiplications required to calculate
2797 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2798 subroutine of powi_cost. CACHE is an array indicating
2799 which exponents have already been calculated. */
2801 static int
2802 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2804 /* If we've already calculated this exponent, then this evaluation
2805 doesn't require any additional multiplications. */
2806 if (cache[n])
2807 return 0;
2809 cache[n] = true;
2810 return powi_lookup_cost (n - powi_table[n], cache)
2811 + powi_lookup_cost (powi_table[n], cache) + 1;
2814 /* Return the number of multiplications required to calculate
2815 powi(x,n) for an arbitrary x, given the exponent N. This
2816 function needs to be kept in sync with expand_powi below. */
2818 static int
2819 powi_cost (HOST_WIDE_INT n)
2821 bool cache[POWI_TABLE_SIZE];
2822 unsigned HOST_WIDE_INT digit;
2823 unsigned HOST_WIDE_INT val;
2824 int result;
2826 if (n == 0)
2827 return 0;
2829 /* Ignore the reciprocal when calculating the cost. */
2830 val = (n < 0) ? -n : n;
2832 /* Initialize the exponent cache. */
2833 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2834 cache[1] = true;
2836 result = 0;
2838 while (val >= POWI_TABLE_SIZE)
2840 if (val & 1)
2842 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2843 result += powi_lookup_cost (digit, cache)
2844 + POWI_WINDOW_SIZE + 1;
2845 val >>= POWI_WINDOW_SIZE;
2847 else
2849 val >>= 1;
2850 result++;
2854 return result + powi_lookup_cost (val, cache);
2857 /* Recursive subroutine of expand_powi. This function takes the array,
2858 CACHE, of already calculated exponents and an exponent N and returns
2859 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2861 static rtx
2862 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2864 unsigned HOST_WIDE_INT digit;
2865 rtx target, result;
2866 rtx op0, op1;
2868 if (n < POWI_TABLE_SIZE)
2870 if (cache[n])
2871 return cache[n];
2873 target = gen_reg_rtx (mode);
2874 cache[n] = target;
2876 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2877 op1 = expand_powi_1 (mode, powi_table[n], cache);
2879 else if (n & 1)
2881 target = gen_reg_rtx (mode);
2882 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2883 op0 = expand_powi_1 (mode, n - digit, cache);
2884 op1 = expand_powi_1 (mode, digit, cache);
2886 else
2888 target = gen_reg_rtx (mode);
2889 op0 = expand_powi_1 (mode, n >> 1, cache);
2890 op1 = op0;
2893 result = expand_mult (mode, op0, op1, target, 0);
2894 if (result != target)
2895 emit_move_insn (target, result);
2896 return target;
2899 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2900 floating point operand in mode MODE, and N is the exponent. This
2901 function needs to be kept in sync with powi_cost above. */
2903 static rtx
2904 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2906 unsigned HOST_WIDE_INT val;
2907 rtx cache[POWI_TABLE_SIZE];
2908 rtx result;
2910 if (n == 0)
2911 return CONST1_RTX (mode);
2913 val = (n < 0) ? -n : n;
2915 memset (cache, 0, sizeof (cache));
2916 cache[1] = x;
2918 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2920 /* If the original exponent was negative, reciprocate the result. */
2921 if (n < 0)
2922 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2923 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2925 return result;
2928 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2929 a normal call should be emitted rather than expanding the function
2930 in-line. EXP is the expression that is a call to the builtin
2931 function; if convenient, the result should be placed in TARGET. */
2933 static rtx
2934 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2936 tree arg0, arg1;
2937 tree fn, narg0;
2938 tree type = TREE_TYPE (exp);
2939 REAL_VALUE_TYPE cint, c, c2;
2940 HOST_WIDE_INT n;
2941 rtx op, op2;
2942 enum machine_mode mode = TYPE_MODE (type);
2944 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2945 return NULL_RTX;
2947 arg0 = CALL_EXPR_ARG (exp, 0);
2948 arg1 = CALL_EXPR_ARG (exp, 1);
2950 if (TREE_CODE (arg1) != REAL_CST
2951 || TREE_OVERFLOW (arg1))
2952 return expand_builtin_mathfn_2 (exp, target, subtarget);
2954 /* Handle constant exponents. */
2956 /* For integer valued exponents we can expand to an optimal multiplication
2957 sequence using expand_powi. */
2958 c = TREE_REAL_CST (arg1);
2959 n = real_to_integer (&c);
2960 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2961 if (real_identical (&c, &cint)
2962 && ((n >= -1 && n <= 2)
2963 || (flag_unsafe_math_optimizations
2964 && optimize_insn_for_speed_p ()
2965 && powi_cost (n) <= POWI_MAX_MULTS)))
2967 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2968 if (n != 1)
2970 op = force_reg (mode, op);
2971 op = expand_powi (op, mode, n);
2973 return op;
2976 narg0 = builtin_save_expr (arg0);
2978 /* If the exponent is not integer valued, check if it is half of an integer.
2979 In this case we can expand to sqrt (x) * x**(n/2). */
2980 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2981 if (fn != NULL_TREE)
2983 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2984 n = real_to_integer (&c2);
2985 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2986 if (real_identical (&c2, &cint)
2987 && ((flag_unsafe_math_optimizations
2988 && optimize_insn_for_speed_p ()
2989 && powi_cost (n/2) <= POWI_MAX_MULTS)
2990 || n == 1))
2992 tree call_expr = build_call_expr (fn, 1, narg0);
2993 /* Use expand_expr in case the newly built call expression
2994 was folded to a non-call. */
2995 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2996 if (n != 1)
2998 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2999 op2 = force_reg (mode, op2);
3000 op2 = expand_powi (op2, mode, abs (n / 2));
3001 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3002 0, OPTAB_LIB_WIDEN);
3003 /* If the original exponent was negative, reciprocate the
3004 result. */
3005 if (n < 0)
3006 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3007 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3009 return op;
3013 /* Try if the exponent is a third of an integer. In this case
3014 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3015 different from pow (x, 1./3.) due to rounding and behavior
3016 with negative x we need to constrain this transformation to
3017 unsafe math and positive x or finite math. */
3018 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3019 if (fn != NULL_TREE
3020 && flag_unsafe_math_optimizations
3021 && (tree_expr_nonnegative_p (arg0)
3022 || !HONOR_NANS (mode)))
3024 REAL_VALUE_TYPE dconst3;
3025 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3026 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3027 real_round (&c2, mode, &c2);
3028 n = real_to_integer (&c2);
3029 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3030 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3031 real_convert (&c2, mode, &c2);
3032 if (real_identical (&c2, &c)
3033 && ((optimize_insn_for_speed_p ()
3034 && powi_cost (n/3) <= POWI_MAX_MULTS)
3035 || n == 1))
3037 tree call_expr = build_call_expr (fn, 1,narg0);
3038 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3039 if (abs (n) % 3 == 2)
3040 op = expand_simple_binop (mode, MULT, op, op, op,
3041 0, OPTAB_LIB_WIDEN);
3042 if (n != 1)
3044 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3045 op2 = force_reg (mode, op2);
3046 op2 = expand_powi (op2, mode, abs (n / 3));
3047 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3048 0, OPTAB_LIB_WIDEN);
3049 /* If the original exponent was negative, reciprocate the
3050 result. */
3051 if (n < 0)
3052 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3053 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3055 return op;
3059 /* Fall back to optab expansion. */
3060 return expand_builtin_mathfn_2 (exp, target, subtarget);
3063 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3064 a normal call should be emitted rather than expanding the function
3065 in-line. EXP is the expression that is a call to the builtin
3066 function; if convenient, the result should be placed in TARGET. */
3068 static rtx
3069 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3071 tree arg0, arg1;
3072 rtx op0, op1;
3073 enum machine_mode mode;
3074 enum machine_mode mode2;
3076 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3077 return NULL_RTX;
3079 arg0 = CALL_EXPR_ARG (exp, 0);
3080 arg1 = CALL_EXPR_ARG (exp, 1);
3081 mode = TYPE_MODE (TREE_TYPE (exp));
3083 /* Handle constant power. */
3085 if (TREE_CODE (arg1) == INTEGER_CST
3086 && !TREE_OVERFLOW (arg1))
3088 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3090 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3091 Otherwise, check the number of multiplications required. */
3092 if ((TREE_INT_CST_HIGH (arg1) == 0
3093 || TREE_INT_CST_HIGH (arg1) == -1)
3094 && ((n >= -1 && n <= 2)
3095 || (optimize_insn_for_speed_p ()
3096 && powi_cost (n) <= POWI_MAX_MULTS)))
3098 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3099 op0 = force_reg (mode, op0);
3100 return expand_powi (op0, mode, n);
3104 /* Emit a libcall to libgcc. */
3106 /* Mode of the 2nd argument must match that of an int. */
3107 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3109 if (target == NULL_RTX)
3110 target = gen_reg_rtx (mode);
3112 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3113 if (GET_MODE (op0) != mode)
3114 op0 = convert_to_mode (mode, op0, 0);
3115 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3116 if (GET_MODE (op1) != mode2)
3117 op1 = convert_to_mode (mode2, op1, 0);
3119 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3120 target, LCT_CONST, mode, 2,
3121 op0, mode, op1, mode2);
3123 return target;
3126 /* Expand expression EXP which is a call to the strlen builtin. Return
3127 NULL_RTX if we failed the caller should emit a normal call, otherwise
3128 try to get the result in TARGET, if convenient. */
3130 static rtx
3131 expand_builtin_strlen (tree exp, rtx target,
3132 enum machine_mode target_mode)
3134 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3135 return NULL_RTX;
3136 else
3138 rtx pat;
3139 tree len;
3140 tree src = CALL_EXPR_ARG (exp, 0);
3141 rtx result, src_reg, char_rtx, before_strlen;
3142 enum machine_mode insn_mode = target_mode, char_mode;
3143 enum insn_code icode = CODE_FOR_nothing;
3144 int align;
3146 /* If the length can be computed at compile-time, return it. */
3147 len = c_strlen (src, 0);
3148 if (len)
3149 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3151 /* If the length can be computed at compile-time and is constant
3152 integer, but there are side-effects in src, evaluate
3153 src for side-effects, then return len.
3154 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3155 can be optimized into: i++; x = 3; */
3156 len = c_strlen (src, 1);
3157 if (len && TREE_CODE (len) == INTEGER_CST)
3159 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3160 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3163 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3165 /* If SRC is not a pointer type, don't do this operation inline. */
3166 if (align == 0)
3167 return NULL_RTX;
3169 /* Bail out if we can't compute strlen in the right mode. */
3170 while (insn_mode != VOIDmode)
3172 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3173 if (icode != CODE_FOR_nothing)
3174 break;
3176 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3178 if (insn_mode == VOIDmode)
3179 return NULL_RTX;
3181 /* Make a place to write the result of the instruction. */
3182 result = target;
3183 if (! (result != 0
3184 && REG_P (result)
3185 && GET_MODE (result) == insn_mode
3186 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3187 result = gen_reg_rtx (insn_mode);
3189 /* Make a place to hold the source address. We will not expand
3190 the actual source until we are sure that the expansion will
3191 not fail -- there are trees that cannot be expanded twice. */
3192 src_reg = gen_reg_rtx (Pmode);
3194 /* Mark the beginning of the strlen sequence so we can emit the
3195 source operand later. */
3196 before_strlen = get_last_insn ();
3198 char_rtx = const0_rtx;
3199 char_mode = insn_data[(int) icode].operand[2].mode;
3200 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3201 char_mode))
3202 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3204 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3205 char_rtx, GEN_INT (align));
3206 if (! pat)
3207 return NULL_RTX;
3208 emit_insn (pat);
3210 /* Now that we are assured of success, expand the source. */
3211 start_sequence ();
3212 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3213 if (pat != src_reg)
3214 emit_move_insn (src_reg, pat);
3215 pat = get_insns ();
3216 end_sequence ();
3218 if (before_strlen)
3219 emit_insn_after (pat, before_strlen);
3220 else
3221 emit_insn_before (pat, get_insns ());
3223 /* Return the value in the proper mode for this function. */
3224 if (GET_MODE (result) == target_mode)
3225 target = result;
3226 else if (target != 0)
3227 convert_move (target, result, 0);
3228 else
3229 target = convert_to_mode (target_mode, result, 0);
3231 return target;
3235 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3236 caller should emit a normal call, otherwise try to get the result
3237 in TARGET, if convenient (and in mode MODE if that's convenient). */
3239 static rtx
3240 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3242 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3244 tree type = TREE_TYPE (exp);
3245 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3246 CALL_EXPR_ARG (exp, 1), type);
3247 if (result)
3248 return expand_expr (result, target, mode, EXPAND_NORMAL);
3250 return NULL_RTX;
3253 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3254 caller should emit a normal call, otherwise try to get the result
3255 in TARGET, if convenient (and in mode MODE if that's convenient). */
3257 static rtx
3258 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3260 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3262 tree type = TREE_TYPE (exp);
3263 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3264 CALL_EXPR_ARG (exp, 1), type);
3265 if (result)
3266 return expand_expr (result, target, mode, EXPAND_NORMAL);
3268 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3270 return NULL_RTX;
3273 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3274 caller should emit a normal call, otherwise try to get the result
3275 in TARGET, if convenient (and in mode MODE if that's convenient). */
3277 static rtx
3278 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3280 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3282 tree type = TREE_TYPE (exp);
3283 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3284 CALL_EXPR_ARG (exp, 1), type);
3285 if (result)
3286 return expand_expr (result, target, mode, EXPAND_NORMAL);
3288 return NULL_RTX;
3291 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3292 caller should emit a normal call, otherwise try to get the result
3293 in TARGET, if convenient (and in mode MODE if that's convenient). */
3295 static rtx
3296 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3298 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3300 tree type = TREE_TYPE (exp);
3301 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3302 CALL_EXPR_ARG (exp, 1), type);
3303 if (result)
3304 return expand_expr (result, target, mode, EXPAND_NORMAL);
3306 return NULL_RTX;
3309 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3310 bytes from constant string DATA + OFFSET and return it as target
3311 constant. */
3313 static rtx
3314 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3315 enum machine_mode mode)
3317 const char *str = (const char *) data;
3319 gcc_assert (offset >= 0
3320 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3321 <= strlen (str) + 1));
3323 return c_readstr (str + offset, mode);
3326 /* Expand a call EXP to the memcpy builtin.
3327 Return NULL_RTX if we failed, the caller should emit a normal call,
3328 otherwise try to get the result in TARGET, if convenient (and in
3329 mode MODE if that's convenient). */
3331 static rtx
3332 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3334 tree fndecl = get_callee_fndecl (exp);
3336 if (!validate_arglist (exp,
3337 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3338 return NULL_RTX;
3339 else
3341 tree dest = CALL_EXPR_ARG (exp, 0);
3342 tree src = CALL_EXPR_ARG (exp, 1);
3343 tree len = CALL_EXPR_ARG (exp, 2);
3344 const char *src_str;
3345 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3346 unsigned int dest_align
3347 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3348 rtx dest_mem, src_mem, dest_addr, len_rtx;
3349 tree result = fold_builtin_memory_op (dest, src, len,
3350 TREE_TYPE (TREE_TYPE (fndecl)),
3351 false, /*endp=*/0);
3352 HOST_WIDE_INT expected_size = -1;
3353 unsigned int expected_align = 0;
3354 tree_ann_common_t ann;
3356 if (result)
3358 while (TREE_CODE (result) == COMPOUND_EXPR)
3360 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3361 EXPAND_NORMAL);
3362 result = TREE_OPERAND (result, 1);
3364 return expand_expr (result, target, mode, EXPAND_NORMAL);
3367 /* If DEST is not a pointer type, call the normal function. */
3368 if (dest_align == 0)
3369 return NULL_RTX;
3371 /* If either SRC is not a pointer type, don't do this
3372 operation in-line. */
3373 if (src_align == 0)
3374 return NULL_RTX;
3376 ann = tree_common_ann (exp);
3377 if (ann)
3378 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3380 if (expected_align < dest_align)
3381 expected_align = dest_align;
3382 dest_mem = get_memory_rtx (dest, len);
3383 set_mem_align (dest_mem, dest_align);
3384 len_rtx = expand_normal (len);
3385 src_str = c_getstr (src);
3387 /* If SRC is a string constant and block move would be done
3388 by pieces, we can avoid loading the string from memory
3389 and only stored the computed constants. */
3390 if (src_str
3391 && GET_CODE (len_rtx) == CONST_INT
3392 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3393 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3394 CONST_CAST (char *, src_str),
3395 dest_align, false))
3397 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3398 builtin_memcpy_read_str,
3399 CONST_CAST (char *, src_str),
3400 dest_align, false, 0);
3401 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3402 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3403 return dest_mem;
3406 src_mem = get_memory_rtx (src, len);
3407 set_mem_align (src_mem, src_align);
3409 /* Copy word part most expediently. */
3410 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3411 CALL_EXPR_TAILCALL (exp)
3412 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3413 expected_align, expected_size);
3415 if (dest_addr == 0)
3417 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3418 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3420 return dest_addr;
3424 /* Expand a call EXP to the mempcpy builtin.
3425 Return NULL_RTX if we failed; the caller should emit a normal call,
3426 otherwise try to get the result in TARGET, if convenient (and in
3427 mode MODE if that's convenient). If ENDP is 0 return the
3428 destination pointer, if ENDP is 1 return the end pointer ala
3429 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3430 stpcpy. */
3432 static rtx
3433 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3435 if (!validate_arglist (exp,
3436 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3437 return NULL_RTX;
3438 else
3440 tree dest = CALL_EXPR_ARG (exp, 0);
3441 tree src = CALL_EXPR_ARG (exp, 1);
3442 tree len = CALL_EXPR_ARG (exp, 2);
3443 return expand_builtin_mempcpy_args (dest, src, len,
3444 TREE_TYPE (exp),
3445 target, mode, /*endp=*/ 1);
3449 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3450 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3451 so that this can also be called without constructing an actual CALL_EXPR.
3452 TYPE is the return type of the call. The other arguments and return value
3453 are the same as for expand_builtin_mempcpy. */
3455 static rtx
3456 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3457 rtx target, enum machine_mode mode, int endp)
3459 /* If return value is ignored, transform mempcpy into memcpy. */
3460 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3462 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3463 tree result = build_call_expr (fn, 3, dest, src, len);
3465 while (TREE_CODE (result) == COMPOUND_EXPR)
3467 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3468 EXPAND_NORMAL);
3469 result = TREE_OPERAND (result, 1);
3471 return expand_expr (result, target, mode, EXPAND_NORMAL);
3473 else
3475 const char *src_str;
3476 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3477 unsigned int dest_align
3478 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3479 rtx dest_mem, src_mem, len_rtx;
3480 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3482 if (result)
3484 while (TREE_CODE (result) == COMPOUND_EXPR)
3486 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3487 EXPAND_NORMAL);
3488 result = TREE_OPERAND (result, 1);
3490 return expand_expr (result, target, mode, EXPAND_NORMAL);
3493 /* If either SRC or DEST is not a pointer type, don't do this
3494 operation in-line. */
3495 if (dest_align == 0 || src_align == 0)
3496 return NULL_RTX;
3498 /* If LEN is not constant, call the normal function. */
3499 if (! host_integerp (len, 1))
3500 return NULL_RTX;
3502 len_rtx = expand_normal (len);
3503 src_str = c_getstr (src);
3505 /* If SRC is a string constant and block move would be done
3506 by pieces, we can avoid loading the string from memory
3507 and only stored the computed constants. */
3508 if (src_str
3509 && GET_CODE (len_rtx) == CONST_INT
3510 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3511 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3512 CONST_CAST (char *, src_str),
3513 dest_align, false))
3515 dest_mem = get_memory_rtx (dest, len);
3516 set_mem_align (dest_mem, dest_align);
3517 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3518 builtin_memcpy_read_str,
3519 CONST_CAST (char *, src_str),
3520 dest_align, false, endp);
3521 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3522 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3523 return dest_mem;
3526 if (GET_CODE (len_rtx) == CONST_INT
3527 && can_move_by_pieces (INTVAL (len_rtx),
3528 MIN (dest_align, src_align)))
3530 dest_mem = get_memory_rtx (dest, len);
3531 set_mem_align (dest_mem, dest_align);
3532 src_mem = get_memory_rtx (src, len);
3533 set_mem_align (src_mem, src_align);
3534 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3535 MIN (dest_align, src_align), endp);
3536 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3537 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3538 return dest_mem;
3541 return NULL_RTX;
3545 /* Expand expression EXP, which is a call to the memmove builtin. Return
3546 NULL_RTX if we failed; the caller should emit a normal call. */
3548 static rtx
3549 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3551 if (!validate_arglist (exp,
3552 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3553 return NULL_RTX;
3554 else
3556 tree dest = CALL_EXPR_ARG (exp, 0);
3557 tree src = CALL_EXPR_ARG (exp, 1);
3558 tree len = CALL_EXPR_ARG (exp, 2);
3559 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3560 target, mode, ignore);
3564 /* Helper function to do the actual work for expand_builtin_memmove. The
3565 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3566 so that this can also be called without constructing an actual CALL_EXPR.
3567 TYPE is the return type of the call. The other arguments and return value
3568 are the same as for expand_builtin_memmove. */
3570 static rtx
3571 expand_builtin_memmove_args (tree dest, tree src, tree len,
3572 tree type, rtx target, enum machine_mode mode,
3573 int ignore)
3575 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3577 if (result)
3579 STRIP_TYPE_NOPS (result);
3580 while (TREE_CODE (result) == COMPOUND_EXPR)
3582 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3583 EXPAND_NORMAL);
3584 result = TREE_OPERAND (result, 1);
3586 return expand_expr (result, target, mode, EXPAND_NORMAL);
3589 /* Otherwise, call the normal function. */
3590 return NULL_RTX;
3593 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3594 NULL_RTX if we failed the caller should emit a normal call. */
3596 static rtx
3597 expand_builtin_bcopy (tree exp, int ignore)
3599 tree type = TREE_TYPE (exp);
3600 tree src, dest, size;
3602 if (!validate_arglist (exp,
3603 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3604 return NULL_RTX;
3606 src = CALL_EXPR_ARG (exp, 0);
3607 dest = CALL_EXPR_ARG (exp, 1);
3608 size = CALL_EXPR_ARG (exp, 2);
3610 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3611 This is done this way so that if it isn't expanded inline, we fall
3612 back to calling bcopy instead of memmove. */
3613 return expand_builtin_memmove_args (dest, src,
3614 fold_convert (sizetype, size),
3615 type, const0_rtx, VOIDmode,
3616 ignore);
3619 #ifndef HAVE_movstr
3620 # define HAVE_movstr 0
3621 # define CODE_FOR_movstr CODE_FOR_nothing
3622 #endif
3624 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3625 we failed, the caller should emit a normal call, otherwise try to
3626 get the result in TARGET, if convenient. If ENDP is 0 return the
3627 destination pointer, if ENDP is 1 return the end pointer ala
3628 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3629 stpcpy. */
3631 static rtx
3632 expand_movstr (tree dest, tree src, rtx target, int endp)
3634 rtx end;
3635 rtx dest_mem;
3636 rtx src_mem;
3637 rtx insn;
3638 const struct insn_data * data;
3640 if (!HAVE_movstr)
3641 return NULL_RTX;
3643 dest_mem = get_memory_rtx (dest, NULL);
3644 src_mem = get_memory_rtx (src, NULL);
3645 if (!endp)
3647 target = force_reg (Pmode, XEXP (dest_mem, 0));
3648 dest_mem = replace_equiv_address (dest_mem, target);
3649 end = gen_reg_rtx (Pmode);
3651 else
3653 if (target == 0 || target == const0_rtx)
3655 end = gen_reg_rtx (Pmode);
3656 if (target == 0)
3657 target = end;
3659 else
3660 end = target;
3663 data = insn_data + CODE_FOR_movstr;
3665 if (data->operand[0].mode != VOIDmode)
3666 end = gen_lowpart (data->operand[0].mode, end);
3668 insn = data->genfun (end, dest_mem, src_mem);
3670 gcc_assert (insn);
3672 emit_insn (insn);
3674 /* movstr is supposed to set end to the address of the NUL
3675 terminator. If the caller requested a mempcpy-like return value,
3676 adjust it. */
3677 if (endp == 1 && target != const0_rtx)
3679 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3680 emit_move_insn (target, force_operand (tem, NULL_RTX));
3683 return target;
3686 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3687 NULL_RTX if we failed the caller should emit a normal call, otherwise
3688 try to get the result in TARGET, if convenient (and in mode MODE if that's
3689 convenient). */
3691 static rtx
3692 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3694 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3696 tree dest = CALL_EXPR_ARG (exp, 0);
3697 tree src = CALL_EXPR_ARG (exp, 1);
3698 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3700 return NULL_RTX;
3703 /* Helper function to do the actual work for expand_builtin_strcpy. The
3704 arguments to the builtin_strcpy call DEST and SRC are broken out
3705 so that this can also be called without constructing an actual CALL_EXPR.
3706 The other arguments and return value are the same as for
3707 expand_builtin_strcpy. */
3709 static rtx
3710 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3711 rtx target, enum machine_mode mode)
3713 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3714 if (result)
3715 return expand_expr (result, target, mode, EXPAND_NORMAL);
3716 return expand_movstr (dest, src, target, /*endp=*/0);
3720 /* Expand a call EXP to the stpcpy builtin.
3721 Return NULL_RTX if we failed the caller should emit a normal call,
3722 otherwise try to get the result in TARGET, if convenient (and in
3723 mode MODE if that's convenient). */
3725 static rtx
3726 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3728 tree dst, src;
3730 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3731 return NULL_RTX;
3733 dst = CALL_EXPR_ARG (exp, 0);
3734 src = CALL_EXPR_ARG (exp, 1);
3736 /* If return value is ignored, transform stpcpy into strcpy. */
3737 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3739 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3740 tree result = build_call_expr (fn, 2, dst, src);
3742 STRIP_NOPS (result);
3743 while (TREE_CODE (result) == COMPOUND_EXPR)
3745 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3746 EXPAND_NORMAL);
3747 result = TREE_OPERAND (result, 1);
3749 return expand_expr (result, target, mode, EXPAND_NORMAL);
3751 else
3753 tree len, lenp1;
3754 rtx ret;
3756 /* Ensure we get an actual string whose length can be evaluated at
3757 compile-time, not an expression containing a string. This is
3758 because the latter will potentially produce pessimized code
3759 when used to produce the return value. */
3760 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3761 return expand_movstr (dst, src, target, /*endp=*/2);
3763 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3764 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3765 target, mode, /*endp=*/2);
3767 if (ret)
3768 return ret;
3770 if (TREE_CODE (len) == INTEGER_CST)
3772 rtx len_rtx = expand_normal (len);
3774 if (GET_CODE (len_rtx) == CONST_INT)
3776 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3777 dst, src, target, mode);
3779 if (ret)
3781 if (! target)
3783 if (mode != VOIDmode)
3784 target = gen_reg_rtx (mode);
3785 else
3786 target = gen_reg_rtx (GET_MODE (ret));
3788 if (GET_MODE (target) != GET_MODE (ret))
3789 ret = gen_lowpart (GET_MODE (target), ret);
3791 ret = plus_constant (ret, INTVAL (len_rtx));
3792 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3793 gcc_assert (ret);
3795 return target;
3800 return expand_movstr (dst, src, target, /*endp=*/2);
3804 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3805 bytes from constant string DATA + OFFSET and return it as target
3806 constant. */
3809 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3810 enum machine_mode mode)
3812 const char *str = (const char *) data;
3814 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3815 return const0_rtx;
3817 return c_readstr (str + offset, mode);
3820 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3821 NULL_RTX if we failed the caller should emit a normal call. */
3823 static rtx
3824 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3826 tree fndecl = get_callee_fndecl (exp);
3828 if (validate_arglist (exp,
3829 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3831 tree dest = CALL_EXPR_ARG (exp, 0);
3832 tree src = CALL_EXPR_ARG (exp, 1);
3833 tree len = CALL_EXPR_ARG (exp, 2);
3834 tree slen = c_strlen (src, 1);
3835 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3837 if (result)
3839 while (TREE_CODE (result) == COMPOUND_EXPR)
3841 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3842 EXPAND_NORMAL);
3843 result = TREE_OPERAND (result, 1);
3845 return expand_expr (result, target, mode, EXPAND_NORMAL);
3848 /* We must be passed a constant len and src parameter. */
3849 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3850 return NULL_RTX;
3852 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3854 /* We're required to pad with trailing zeros if the requested
3855 len is greater than strlen(s2)+1. In that case try to
3856 use store_by_pieces, if it fails, punt. */
3857 if (tree_int_cst_lt (slen, len))
3859 unsigned int dest_align
3860 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3861 const char *p = c_getstr (src);
3862 rtx dest_mem;
3864 if (!p || dest_align == 0 || !host_integerp (len, 1)
3865 || !can_store_by_pieces (tree_low_cst (len, 1),
3866 builtin_strncpy_read_str,
3867 CONST_CAST (char *, p),
3868 dest_align, false))
3869 return NULL_RTX;
3871 dest_mem = get_memory_rtx (dest, len);
3872 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3873 builtin_strncpy_read_str,
3874 CONST_CAST (char *, p), dest_align, false, 0);
3875 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3876 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3877 return dest_mem;
3880 return NULL_RTX;
3883 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3884 bytes from constant string DATA + OFFSET and return it as target
3885 constant. */
3888 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3889 enum machine_mode mode)
3891 const char *c = (const char *) data;
3892 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3894 memset (p, *c, GET_MODE_SIZE (mode));
3896 return c_readstr (p, mode);
3899 /* Callback routine for store_by_pieces. Return the RTL of a register
3900 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3901 char value given in the RTL register data. For example, if mode is
3902 4 bytes wide, return the RTL for 0x01010101*data. */
3904 static rtx
3905 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3906 enum machine_mode mode)
3908 rtx target, coeff;
3909 size_t size;
3910 char *p;
3912 size = GET_MODE_SIZE (mode);
3913 if (size == 1)
3914 return (rtx) data;
3916 p = XALLOCAVEC (char, size);
3917 memset (p, 1, size);
3918 coeff = c_readstr (p, mode);
3920 target = convert_to_mode (mode, (rtx) data, 1);
3921 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3922 return force_reg (mode, target);
3925 /* Expand expression EXP, which is a call to the memset builtin. Return
3926 NULL_RTX if we failed the caller should emit a normal call, otherwise
3927 try to get the result in TARGET, if convenient (and in mode MODE if that's
3928 convenient). */
3930 static rtx
3931 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3933 if (!validate_arglist (exp,
3934 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3935 return NULL_RTX;
3936 else
3938 tree dest = CALL_EXPR_ARG (exp, 0);
3939 tree val = CALL_EXPR_ARG (exp, 1);
3940 tree len = CALL_EXPR_ARG (exp, 2);
3941 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3945 /* Helper function to do the actual work for expand_builtin_memset. The
3946 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3947 so that this can also be called without constructing an actual CALL_EXPR.
3948 The other arguments and return value are the same as for
3949 expand_builtin_memset. */
3951 static rtx
3952 expand_builtin_memset_args (tree dest, tree val, tree len,
3953 rtx target, enum machine_mode mode, tree orig_exp)
3955 tree fndecl, fn;
3956 enum built_in_function fcode;
3957 char c;
3958 unsigned int dest_align;
3959 rtx dest_mem, dest_addr, len_rtx;
3960 HOST_WIDE_INT expected_size = -1;
3961 unsigned int expected_align = 0;
3962 tree_ann_common_t ann;
3964 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3966 /* If DEST is not a pointer type, don't do this operation in-line. */
3967 if (dest_align == 0)
3968 return NULL_RTX;
3970 ann = tree_common_ann (orig_exp);
3971 if (ann)
3972 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3974 if (expected_align < dest_align)
3975 expected_align = dest_align;
3977 /* If the LEN parameter is zero, return DEST. */
3978 if (integer_zerop (len))
3980 /* Evaluate and ignore VAL in case it has side-effects. */
3981 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3982 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3985 /* Stabilize the arguments in case we fail. */
3986 dest = builtin_save_expr (dest);
3987 val = builtin_save_expr (val);
3988 len = builtin_save_expr (len);
3990 len_rtx = expand_normal (len);
3991 dest_mem = get_memory_rtx (dest, len);
3993 if (TREE_CODE (val) != INTEGER_CST)
3995 rtx val_rtx;
3997 val_rtx = expand_normal (val);
3998 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3999 val_rtx, 0);
4001 /* Assume that we can memset by pieces if we can store
4002 * the coefficients by pieces (in the required modes).
4003 * We can't pass builtin_memset_gen_str as that emits RTL. */
4004 c = 1;
4005 if (host_integerp (len, 1)
4006 && can_store_by_pieces (tree_low_cst (len, 1),
4007 builtin_memset_read_str, &c, dest_align,
4008 true))
4010 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4011 val_rtx);
4012 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4013 builtin_memset_gen_str, val_rtx, dest_align,
4014 true, 0);
4016 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4017 dest_align, expected_align,
4018 expected_size))
4019 goto do_libcall;
4021 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4022 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4023 return dest_mem;
4026 if (target_char_cast (val, &c))
4027 goto do_libcall;
4029 if (c)
4031 if (host_integerp (len, 1)
4032 && can_store_by_pieces (tree_low_cst (len, 1),
4033 builtin_memset_read_str, &c, dest_align,
4034 true))
4035 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4036 builtin_memset_read_str, &c, dest_align, true, 0);
4037 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4038 dest_align, expected_align,
4039 expected_size))
4040 goto do_libcall;
4042 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4043 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4044 return dest_mem;
4047 set_mem_align (dest_mem, dest_align);
4048 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4049 CALL_EXPR_TAILCALL (orig_exp)
4050 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4051 expected_align, expected_size);
4053 if (dest_addr == 0)
4055 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4056 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4059 return dest_addr;
4061 do_libcall:
4062 fndecl = get_callee_fndecl (orig_exp);
4063 fcode = DECL_FUNCTION_CODE (fndecl);
4064 if (fcode == BUILT_IN_MEMSET)
4065 fn = build_call_expr (fndecl, 3, dest, val, len);
4066 else if (fcode == BUILT_IN_BZERO)
4067 fn = build_call_expr (fndecl, 2, dest, len);
4068 else
4069 gcc_unreachable ();
4070 if (TREE_CODE (fn) == CALL_EXPR)
4071 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4072 return expand_call (fn, target, target == const0_rtx);
4075 /* Expand expression EXP, which is a call to the bzero builtin. Return
4076 NULL_RTX if we failed the caller should emit a normal call. */
4078 static rtx
4079 expand_builtin_bzero (tree exp)
4081 tree dest, size;
4083 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4084 return NULL_RTX;
4086 dest = CALL_EXPR_ARG (exp, 0);
4087 size = CALL_EXPR_ARG (exp, 1);
4089 /* New argument list transforming bzero(ptr x, int y) to
4090 memset(ptr x, int 0, size_t y). This is done this way
4091 so that if it isn't expanded inline, we fallback to
4092 calling bzero instead of memset. */
4094 return expand_builtin_memset_args (dest, integer_zero_node,
4095 fold_convert (sizetype, size),
4096 const0_rtx, VOIDmode, exp);
4099 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4100 caller should emit a normal call, otherwise try to get the result
4101 in TARGET, if convenient (and in mode MODE if that's convenient). */
4103 static rtx
4104 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4106 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4107 INTEGER_TYPE, VOID_TYPE))
4109 tree type = TREE_TYPE (exp);
4110 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4111 CALL_EXPR_ARG (exp, 1),
4112 CALL_EXPR_ARG (exp, 2), type);
4113 if (result)
4114 return expand_expr (result, target, mode, EXPAND_NORMAL);
4116 return NULL_RTX;
4119 /* Expand expression EXP, which is a call to the memcmp built-in function.
4120 Return NULL_RTX if we failed and the
4121 caller should emit a normal call, otherwise try to get the result in
4122 TARGET, if convenient (and in mode MODE, if that's convenient). */
4124 static rtx
4125 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4127 if (!validate_arglist (exp,
4128 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4129 return NULL_RTX;
4130 else
4132 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4133 CALL_EXPR_ARG (exp, 1),
4134 CALL_EXPR_ARG (exp, 2));
4135 if (result)
4136 return expand_expr (result, target, mode, EXPAND_NORMAL);
4139 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4141 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4142 rtx result;
4143 rtx insn;
4144 tree arg1 = CALL_EXPR_ARG (exp, 0);
4145 tree arg2 = CALL_EXPR_ARG (exp, 1);
4146 tree len = CALL_EXPR_ARG (exp, 2);
4148 int arg1_align
4149 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4150 int arg2_align
4151 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4152 enum machine_mode insn_mode;
4154 #ifdef HAVE_cmpmemsi
4155 if (HAVE_cmpmemsi)
4156 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4157 else
4158 #endif
4159 #ifdef HAVE_cmpstrnsi
4160 if (HAVE_cmpstrnsi)
4161 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4162 else
4163 #endif
4164 return NULL_RTX;
4166 /* If we don't have POINTER_TYPE, call the function. */
4167 if (arg1_align == 0 || arg2_align == 0)
4168 return NULL_RTX;
4170 /* Make a place to write the result of the instruction. */
4171 result = target;
4172 if (! (result != 0
4173 && REG_P (result) && GET_MODE (result) == insn_mode
4174 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4175 result = gen_reg_rtx (insn_mode);
4177 arg1_rtx = get_memory_rtx (arg1, len);
4178 arg2_rtx = get_memory_rtx (arg2, len);
4179 arg3_rtx = expand_normal (len);
4181 /* Set MEM_SIZE as appropriate. */
4182 if (GET_CODE (arg3_rtx) == CONST_INT)
4184 set_mem_size (arg1_rtx, arg3_rtx);
4185 set_mem_size (arg2_rtx, arg3_rtx);
4188 #ifdef HAVE_cmpmemsi
4189 if (HAVE_cmpmemsi)
4190 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4191 GEN_INT (MIN (arg1_align, arg2_align)));
4192 else
4193 #endif
4194 #ifdef HAVE_cmpstrnsi
4195 if (HAVE_cmpstrnsi)
4196 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4197 GEN_INT (MIN (arg1_align, arg2_align)));
4198 else
4199 #endif
4200 gcc_unreachable ();
4202 if (insn)
4203 emit_insn (insn);
4204 else
4205 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4206 TYPE_MODE (integer_type_node), 3,
4207 XEXP (arg1_rtx, 0), Pmode,
4208 XEXP (arg2_rtx, 0), Pmode,
4209 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4210 TYPE_UNSIGNED (sizetype)),
4211 TYPE_MODE (sizetype));
4213 /* Return the value in the proper mode for this function. */
4214 mode = TYPE_MODE (TREE_TYPE (exp));
4215 if (GET_MODE (result) == mode)
4216 return result;
4217 else if (target != 0)
4219 convert_move (target, result, 0);
4220 return target;
4222 else
4223 return convert_to_mode (mode, result, 0);
4225 #endif
4227 return NULL_RTX;
4230 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4231 if we failed the caller should emit a normal call, otherwise try to get
4232 the result in TARGET, if convenient. */
4234 static rtx
4235 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4237 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4238 return NULL_RTX;
4239 else
4241 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4242 CALL_EXPR_ARG (exp, 1));
4243 if (result)
4244 return expand_expr (result, target, mode, EXPAND_NORMAL);
4247 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4248 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4249 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4251 rtx arg1_rtx, arg2_rtx;
4252 rtx result, insn = NULL_RTX;
4253 tree fndecl, fn;
4254 tree arg1 = CALL_EXPR_ARG (exp, 0);
4255 tree arg2 = CALL_EXPR_ARG (exp, 1);
4257 int arg1_align
4258 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4259 int arg2_align
4260 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4262 /* If we don't have POINTER_TYPE, call the function. */
4263 if (arg1_align == 0 || arg2_align == 0)
4264 return NULL_RTX;
4266 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4267 arg1 = builtin_save_expr (arg1);
4268 arg2 = builtin_save_expr (arg2);
4270 arg1_rtx = get_memory_rtx (arg1, NULL);
4271 arg2_rtx = get_memory_rtx (arg2, NULL);
4273 #ifdef HAVE_cmpstrsi
4274 /* Try to call cmpstrsi. */
4275 if (HAVE_cmpstrsi)
4277 enum machine_mode insn_mode
4278 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4280 /* Make a place to write the result of the instruction. */
4281 result = target;
4282 if (! (result != 0
4283 && REG_P (result) && GET_MODE (result) == insn_mode
4284 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4285 result = gen_reg_rtx (insn_mode);
4287 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4288 GEN_INT (MIN (arg1_align, arg2_align)));
4290 #endif
4291 #ifdef HAVE_cmpstrnsi
4292 /* Try to determine at least one length and call cmpstrnsi. */
4293 if (!insn && HAVE_cmpstrnsi)
4295 tree len;
4296 rtx arg3_rtx;
4298 enum machine_mode insn_mode
4299 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4300 tree len1 = c_strlen (arg1, 1);
4301 tree len2 = c_strlen (arg2, 1);
4303 if (len1)
4304 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4305 if (len2)
4306 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4308 /* If we don't have a constant length for the first, use the length
4309 of the second, if we know it. We don't require a constant for
4310 this case; some cost analysis could be done if both are available
4311 but neither is constant. For now, assume they're equally cheap,
4312 unless one has side effects. If both strings have constant lengths,
4313 use the smaller. */
4315 if (!len1)
4316 len = len2;
4317 else if (!len2)
4318 len = len1;
4319 else if (TREE_SIDE_EFFECTS (len1))
4320 len = len2;
4321 else if (TREE_SIDE_EFFECTS (len2))
4322 len = len1;
4323 else if (TREE_CODE (len1) != INTEGER_CST)
4324 len = len2;
4325 else if (TREE_CODE (len2) != INTEGER_CST)
4326 len = len1;
4327 else if (tree_int_cst_lt (len1, len2))
4328 len = len1;
4329 else
4330 len = len2;
4332 /* If both arguments have side effects, we cannot optimize. */
4333 if (!len || TREE_SIDE_EFFECTS (len))
4334 goto do_libcall;
4336 arg3_rtx = expand_normal (len);
4338 /* Make a place to write the result of the instruction. */
4339 result = target;
4340 if (! (result != 0
4341 && REG_P (result) && GET_MODE (result) == insn_mode
4342 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4343 result = gen_reg_rtx (insn_mode);
4345 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4346 GEN_INT (MIN (arg1_align, arg2_align)));
4348 #endif
4350 if (insn)
4352 emit_insn (insn);
4354 /* Return the value in the proper mode for this function. */
4355 mode = TYPE_MODE (TREE_TYPE (exp));
4356 if (GET_MODE (result) == mode)
4357 return result;
4358 if (target == 0)
4359 return convert_to_mode (mode, result, 0);
4360 convert_move (target, result, 0);
4361 return target;
4364 /* Expand the library call ourselves using a stabilized argument
4365 list to avoid re-evaluating the function's arguments twice. */
4366 #ifdef HAVE_cmpstrnsi
4367 do_libcall:
4368 #endif
4369 fndecl = get_callee_fndecl (exp);
4370 fn = build_call_expr (fndecl, 2, arg1, arg2);
4371 if (TREE_CODE (fn) == CALL_EXPR)
4372 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4373 return expand_call (fn, target, target == const0_rtx);
4375 #endif
4376 return NULL_RTX;
4379 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4380 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4381 the result in TARGET, if convenient. */
4383 static rtx
4384 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4386 if (!validate_arglist (exp,
4387 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4388 return NULL_RTX;
4389 else
4391 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4392 CALL_EXPR_ARG (exp, 1),
4393 CALL_EXPR_ARG (exp, 2));
4394 if (result)
4395 return expand_expr (result, target, mode, EXPAND_NORMAL);
4398 /* If c_strlen can determine an expression for one of the string
4399 lengths, and it doesn't have side effects, then emit cmpstrnsi
4400 using length MIN(strlen(string)+1, arg3). */
4401 #ifdef HAVE_cmpstrnsi
4402 if (HAVE_cmpstrnsi)
4404 tree len, len1, len2;
4405 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4406 rtx result, insn;
4407 tree fndecl, fn;
4408 tree arg1 = CALL_EXPR_ARG (exp, 0);
4409 tree arg2 = CALL_EXPR_ARG (exp, 1);
4410 tree arg3 = CALL_EXPR_ARG (exp, 2);
4412 int arg1_align
4413 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4414 int arg2_align
4415 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4416 enum machine_mode insn_mode
4417 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4419 len1 = c_strlen (arg1, 1);
4420 len2 = c_strlen (arg2, 1);
4422 if (len1)
4423 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4424 if (len2)
4425 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4427 /* If we don't have a constant length for the first, use the length
4428 of the second, if we know it. We don't require a constant for
4429 this case; some cost analysis could be done if both are available
4430 but neither is constant. For now, assume they're equally cheap,
4431 unless one has side effects. If both strings have constant lengths,
4432 use the smaller. */
4434 if (!len1)
4435 len = len2;
4436 else if (!len2)
4437 len = len1;
4438 else if (TREE_SIDE_EFFECTS (len1))
4439 len = len2;
4440 else if (TREE_SIDE_EFFECTS (len2))
4441 len = len1;
4442 else if (TREE_CODE (len1) != INTEGER_CST)
4443 len = len2;
4444 else if (TREE_CODE (len2) != INTEGER_CST)
4445 len = len1;
4446 else if (tree_int_cst_lt (len1, len2))
4447 len = len1;
4448 else
4449 len = len2;
4451 /* If both arguments have side effects, we cannot optimize. */
4452 if (!len || TREE_SIDE_EFFECTS (len))
4453 return NULL_RTX;
4455 /* The actual new length parameter is MIN(len,arg3). */
4456 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4457 fold_convert (TREE_TYPE (len), arg3));
4459 /* If we don't have POINTER_TYPE, call the function. */
4460 if (arg1_align == 0 || arg2_align == 0)
4461 return NULL_RTX;
4463 /* Make a place to write the result of the instruction. */
4464 result = target;
4465 if (! (result != 0
4466 && REG_P (result) && GET_MODE (result) == insn_mode
4467 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4468 result = gen_reg_rtx (insn_mode);
4470 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4471 arg1 = builtin_save_expr (arg1);
4472 arg2 = builtin_save_expr (arg2);
4473 len = builtin_save_expr (len);
4475 arg1_rtx = get_memory_rtx (arg1, len);
4476 arg2_rtx = get_memory_rtx (arg2, len);
4477 arg3_rtx = expand_normal (len);
4478 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4479 GEN_INT (MIN (arg1_align, arg2_align)));
4480 if (insn)
4482 emit_insn (insn);
4484 /* Return the value in the proper mode for this function. */
4485 mode = TYPE_MODE (TREE_TYPE (exp));
4486 if (GET_MODE (result) == mode)
4487 return result;
4488 if (target == 0)
4489 return convert_to_mode (mode, result, 0);
4490 convert_move (target, result, 0);
4491 return target;
4494 /* Expand the library call ourselves using a stabilized argument
4495 list to avoid re-evaluating the function's arguments twice. */
4496 fndecl = get_callee_fndecl (exp);
4497 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4498 if (TREE_CODE (fn) == CALL_EXPR)
4499 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4500 return expand_call (fn, target, target == const0_rtx);
4502 #endif
4503 return NULL_RTX;
4506 /* Expand expression EXP, which is a call to the strcat builtin.
4507 Return NULL_RTX if we failed the caller should emit a normal call,
4508 otherwise try to get the result in TARGET, if convenient. */
4510 static rtx
4511 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4513 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4514 return NULL_RTX;
4515 else
4517 tree dst = CALL_EXPR_ARG (exp, 0);
4518 tree src = CALL_EXPR_ARG (exp, 1);
4519 const char *p = c_getstr (src);
4521 /* If the string length is zero, return the dst parameter. */
4522 if (p && *p == '\0')
4523 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4525 if (optimize_insn_for_speed_p ())
4527 /* See if we can store by pieces into (dst + strlen(dst)). */
4528 tree newsrc, newdst,
4529 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4530 rtx insns;
4532 /* Stabilize the argument list. */
4533 newsrc = builtin_save_expr (src);
4534 dst = builtin_save_expr (dst);
4536 start_sequence ();
4538 /* Create strlen (dst). */
4539 newdst = build_call_expr (strlen_fn, 1, dst);
4540 /* Create (dst p+ strlen (dst)). */
4542 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4543 newdst = builtin_save_expr (newdst);
4545 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4547 end_sequence (); /* Stop sequence. */
4548 return NULL_RTX;
4551 /* Output the entire sequence. */
4552 insns = get_insns ();
4553 end_sequence ();
4554 emit_insn (insns);
4556 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4559 return NULL_RTX;
4563 /* Expand expression EXP, which is a call to the strncat builtin.
4564 Return NULL_RTX if we failed the caller should emit a normal call,
4565 otherwise try to get the result in TARGET, if convenient. */
4567 static rtx
4568 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4570 if (validate_arglist (exp,
4571 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4573 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4574 CALL_EXPR_ARG (exp, 1),
4575 CALL_EXPR_ARG (exp, 2));
4576 if (result)
4577 return expand_expr (result, target, mode, EXPAND_NORMAL);
4579 return NULL_RTX;
4582 /* Expand expression EXP, which is a call to the strspn builtin.
4583 Return NULL_RTX if we failed the caller should emit a normal call,
4584 otherwise try to get the result in TARGET, if convenient. */
4586 static rtx
4587 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4589 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4591 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4592 CALL_EXPR_ARG (exp, 1));
4593 if (result)
4594 return expand_expr (result, target, mode, EXPAND_NORMAL);
4596 return NULL_RTX;
4599 /* Expand expression EXP, which is a call to the strcspn builtin.
4600 Return NULL_RTX if we failed the caller should emit a normal call,
4601 otherwise try to get the result in TARGET, if convenient. */
4603 static rtx
4604 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4606 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4608 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4609 CALL_EXPR_ARG (exp, 1));
4610 if (result)
4611 return expand_expr (result, target, mode, EXPAND_NORMAL);
4613 return NULL_RTX;
4616 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4617 if that's convenient. */
4620 expand_builtin_saveregs (void)
4622 rtx val, seq;
4624 /* Don't do __builtin_saveregs more than once in a function.
4625 Save the result of the first call and reuse it. */
4626 if (saveregs_value != 0)
4627 return saveregs_value;
4629 /* When this function is called, it means that registers must be
4630 saved on entry to this function. So we migrate the call to the
4631 first insn of this function. */
4633 start_sequence ();
4635 /* Do whatever the machine needs done in this case. */
4636 val = targetm.calls.expand_builtin_saveregs ();
4638 seq = get_insns ();
4639 end_sequence ();
4641 saveregs_value = val;
4643 /* Put the insns after the NOTE that starts the function. If this
4644 is inside a start_sequence, make the outer-level insn chain current, so
4645 the code is placed at the start of the function. */
4646 push_topmost_sequence ();
4647 emit_insn_after (seq, entry_of_function ());
4648 pop_topmost_sequence ();
4650 return val;
4653 /* __builtin_args_info (N) returns word N of the arg space info
4654 for the current function. The number and meanings of words
4655 is controlled by the definition of CUMULATIVE_ARGS. */
4657 static rtx
4658 expand_builtin_args_info (tree exp)
4660 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4661 int *word_ptr = (int *) &crtl->args.info;
4663 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4665 if (call_expr_nargs (exp) != 0)
4667 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4668 error ("argument of %<__builtin_args_info%> must be constant");
4669 else
4671 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4673 if (wordnum < 0 || wordnum >= nwords)
4674 error ("argument of %<__builtin_args_info%> out of range");
4675 else
4676 return GEN_INT (word_ptr[wordnum]);
4679 else
4680 error ("missing argument in %<__builtin_args_info%>");
4682 return const0_rtx;
4685 /* Expand a call to __builtin_next_arg. */
4687 static rtx
4688 expand_builtin_next_arg (void)
4690 /* Checking arguments is already done in fold_builtin_next_arg
4691 that must be called before this function. */
4692 return expand_binop (ptr_mode, add_optab,
4693 crtl->args.internal_arg_pointer,
4694 crtl->args.arg_offset_rtx,
4695 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4698 /* Make it easier for the backends by protecting the valist argument
4699 from multiple evaluations. */
4701 static tree
4702 stabilize_va_list (tree valist, int needs_lvalue)
4704 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4706 gcc_assert (vatype != NULL_TREE);
4708 if (TREE_CODE (vatype) == ARRAY_TYPE)
4710 if (TREE_SIDE_EFFECTS (valist))
4711 valist = save_expr (valist);
4713 /* For this case, the backends will be expecting a pointer to
4714 vatype, but it's possible we've actually been given an array
4715 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4716 So fix it. */
4717 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4719 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4720 valist = build_fold_addr_expr_with_type (valist, p1);
4723 else
4725 tree pt;
4727 if (! needs_lvalue)
4729 if (! TREE_SIDE_EFFECTS (valist))
4730 return valist;
4732 pt = build_pointer_type (vatype);
4733 valist = fold_build1 (ADDR_EXPR, pt, valist);
4734 TREE_SIDE_EFFECTS (valist) = 1;
4737 if (TREE_SIDE_EFFECTS (valist))
4738 valist = save_expr (valist);
4739 valist = build_fold_indirect_ref (valist);
4742 return valist;
4745 /* The "standard" definition of va_list is void*. */
4747 tree
4748 std_build_builtin_va_list (void)
4750 return ptr_type_node;
4753 /* The "standard" abi va_list is va_list_type_node. */
4755 tree
4756 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4758 return va_list_type_node;
4761 /* The "standard" type of va_list is va_list_type_node. */
4763 tree
4764 std_canonical_va_list_type (tree type)
4766 tree wtype, htype;
4768 if (INDIRECT_REF_P (type))
4769 type = TREE_TYPE (type);
4770 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4771 type = TREE_TYPE (type);
4772 wtype = va_list_type_node;
4773 htype = type;
4774 /* Treat structure va_list types. */
4775 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4776 htype = TREE_TYPE (htype);
4777 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4779 /* If va_list is an array type, the argument may have decayed
4780 to a pointer type, e.g. by being passed to another function.
4781 In that case, unwrap both types so that we can compare the
4782 underlying records. */
4783 if (TREE_CODE (htype) == ARRAY_TYPE
4784 || POINTER_TYPE_P (htype))
4786 wtype = TREE_TYPE (wtype);
4787 htype = TREE_TYPE (htype);
4790 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4791 return va_list_type_node;
4793 return NULL_TREE;
4796 /* The "standard" implementation of va_start: just assign `nextarg' to
4797 the variable. */
4799 void
4800 std_expand_builtin_va_start (tree valist, rtx nextarg)
4802 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4803 convert_move (va_r, nextarg, 0);
4806 /* Expand EXP, a call to __builtin_va_start. */
4808 static rtx
4809 expand_builtin_va_start (tree exp)
4811 rtx nextarg;
4812 tree valist;
4814 if (call_expr_nargs (exp) < 2)
4816 error ("too few arguments to function %<va_start%>");
4817 return const0_rtx;
4820 if (fold_builtin_next_arg (exp, true))
4821 return const0_rtx;
4823 nextarg = expand_builtin_next_arg ();
4824 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4826 if (targetm.expand_builtin_va_start)
4827 targetm.expand_builtin_va_start (valist, nextarg);
4828 else
4829 std_expand_builtin_va_start (valist, nextarg);
4831 return const0_rtx;
4834 /* The "standard" implementation of va_arg: read the value from the
4835 current (padded) address and increment by the (padded) size. */
4837 tree
4838 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4839 gimple_seq *post_p)
4841 tree addr, t, type_size, rounded_size, valist_tmp;
4842 unsigned HOST_WIDE_INT align, boundary;
4843 bool indirect;
4845 #ifdef ARGS_GROW_DOWNWARD
4846 /* All of the alignment and movement below is for args-grow-up machines.
4847 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4848 implement their own specialized gimplify_va_arg_expr routines. */
4849 gcc_unreachable ();
4850 #endif
4852 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4853 if (indirect)
4854 type = build_pointer_type (type);
4856 align = PARM_BOUNDARY / BITS_PER_UNIT;
4857 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4859 /* When we align parameter on stack for caller, if the parameter
4860 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4861 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4862 here with caller. */
4863 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4864 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4866 boundary /= BITS_PER_UNIT;
4868 /* Hoist the valist value into a temporary for the moment. */
4869 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4871 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4872 requires greater alignment, we must perform dynamic alignment. */
4873 if (boundary > align
4874 && !integer_zerop (TYPE_SIZE (type)))
4876 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4877 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4878 valist_tmp, size_int (boundary - 1)));
4879 gimplify_and_add (t, pre_p);
4881 t = fold_convert (sizetype, valist_tmp);
4882 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4883 fold_convert (TREE_TYPE (valist),
4884 fold_build2 (BIT_AND_EXPR, sizetype, t,
4885 size_int (-boundary))));
4886 gimplify_and_add (t, pre_p);
4888 else
4889 boundary = align;
4891 /* If the actual alignment is less than the alignment of the type,
4892 adjust the type accordingly so that we don't assume strict alignment
4893 when dereferencing the pointer. */
4894 boundary *= BITS_PER_UNIT;
4895 if (boundary < TYPE_ALIGN (type))
4897 type = build_variant_type_copy (type);
4898 TYPE_ALIGN (type) = boundary;
4901 /* Compute the rounded size of the type. */
4902 type_size = size_in_bytes (type);
4903 rounded_size = round_up (type_size, align);
4905 /* Reduce rounded_size so it's sharable with the postqueue. */
4906 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4908 /* Get AP. */
4909 addr = valist_tmp;
4910 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4912 /* Small args are padded downward. */
4913 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4914 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4915 size_binop (MINUS_EXPR, rounded_size, type_size));
4916 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4919 /* Compute new value for AP. */
4920 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4921 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4922 gimplify_and_add (t, pre_p);
4924 addr = fold_convert (build_pointer_type (type), addr);
4926 if (indirect)
4927 addr = build_va_arg_indirect_ref (addr);
4929 return build_va_arg_indirect_ref (addr);
4932 /* Build an indirect-ref expression over the given TREE, which represents a
4933 piece of a va_arg() expansion. */
4934 tree
4935 build_va_arg_indirect_ref (tree addr)
4937 addr = build_fold_indirect_ref (addr);
4939 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4940 mf_mark (addr);
4942 return addr;
4945 /* Return a dummy expression of type TYPE in order to keep going after an
4946 error. */
4948 static tree
4949 dummy_object (tree type)
4951 tree t = build_int_cst (build_pointer_type (type), 0);
4952 return build1 (INDIRECT_REF, type, t);
4955 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4956 builtin function, but a very special sort of operator. */
4958 enum gimplify_status
4959 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4961 tree promoted_type, have_va_type;
4962 tree valist = TREE_OPERAND (*expr_p, 0);
4963 tree type = TREE_TYPE (*expr_p);
4964 tree t;
4966 /* Verify that valist is of the proper type. */
4967 have_va_type = TREE_TYPE (valist);
4968 if (have_va_type == error_mark_node)
4969 return GS_ERROR;
4970 have_va_type = targetm.canonical_va_list_type (have_va_type);
4972 if (have_va_type == NULL_TREE)
4974 error ("first argument to %<va_arg%> not of type %<va_list%>");
4975 return GS_ERROR;
4978 /* Generate a diagnostic for requesting data of a type that cannot
4979 be passed through `...' due to type promotion at the call site. */
4980 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4981 != type)
4983 static bool gave_help;
4984 bool warned;
4986 /* Unfortunately, this is merely undefined, rather than a constraint
4987 violation, so we cannot make this an error. If this call is never
4988 executed, the program is still strictly conforming. */
4989 warned = warning (0, "%qT is promoted to %qT when passed through %<...%>",
4990 type, promoted_type);
4991 if (!gave_help && warned)
4993 gave_help = true;
4994 inform (input_location, "(so you should pass %qT not %qT to %<va_arg%>)",
4995 promoted_type, type);
4998 /* We can, however, treat "undefined" any way we please.
4999 Call abort to encourage the user to fix the program. */
5000 if (warned)
5001 inform (input_location, "if this code is reached, the program will abort");
5002 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
5003 gimplify_and_add (t, pre_p);
5005 /* This is dead code, but go ahead and finish so that the
5006 mode of the result comes out right. */
5007 *expr_p = dummy_object (type);
5008 return GS_ALL_DONE;
5010 else
5012 /* Make it easier for the backends by protecting the valist argument
5013 from multiple evaluations. */
5014 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
5016 /* For this case, the backends will be expecting a pointer to
5017 TREE_TYPE (abi), but it's possible we've
5018 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5019 So fix it. */
5020 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5022 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
5023 valist = build_fold_addr_expr_with_type (valist, p1);
5026 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
5028 else
5029 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
5031 if (!targetm.gimplify_va_arg_expr)
5032 /* FIXME: Once most targets are converted we should merely
5033 assert this is non-null. */
5034 return GS_ALL_DONE;
5036 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5037 return GS_OK;
5041 /* Expand EXP, a call to __builtin_va_end. */
5043 static rtx
5044 expand_builtin_va_end (tree exp)
5046 tree valist = CALL_EXPR_ARG (exp, 0);
5048 /* Evaluate for side effects, if needed. I hate macros that don't
5049 do that. */
5050 if (TREE_SIDE_EFFECTS (valist))
5051 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5053 return const0_rtx;
5056 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5057 builtin rather than just as an assignment in stdarg.h because of the
5058 nastiness of array-type va_list types. */
5060 static rtx
5061 expand_builtin_va_copy (tree exp)
5063 tree dst, src, t;
5065 dst = CALL_EXPR_ARG (exp, 0);
5066 src = CALL_EXPR_ARG (exp, 1);
5068 dst = stabilize_va_list (dst, 1);
5069 src = stabilize_va_list (src, 0);
5071 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5073 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5075 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5076 TREE_SIDE_EFFECTS (t) = 1;
5077 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5079 else
5081 rtx dstb, srcb, size;
5083 /* Evaluate to pointers. */
5084 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5085 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5086 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5087 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5089 dstb = convert_memory_address (Pmode, dstb);
5090 srcb = convert_memory_address (Pmode, srcb);
5092 /* "Dereference" to BLKmode memories. */
5093 dstb = gen_rtx_MEM (BLKmode, dstb);
5094 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5095 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5096 srcb = gen_rtx_MEM (BLKmode, srcb);
5097 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5098 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5100 /* Copy. */
5101 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5104 return const0_rtx;
5107 /* Expand a call to one of the builtin functions __builtin_frame_address or
5108 __builtin_return_address. */
5110 static rtx
5111 expand_builtin_frame_address (tree fndecl, tree exp)
5113 /* The argument must be a nonnegative integer constant.
5114 It counts the number of frames to scan up the stack.
5115 The value is the return address saved in that frame. */
5116 if (call_expr_nargs (exp) == 0)
5117 /* Warning about missing arg was already issued. */
5118 return const0_rtx;
5119 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5121 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5122 error ("invalid argument to %<__builtin_frame_address%>");
5123 else
5124 error ("invalid argument to %<__builtin_return_address%>");
5125 return const0_rtx;
5127 else
5129 rtx tem
5130 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5131 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5133 /* Some ports cannot access arbitrary stack frames. */
5134 if (tem == NULL)
5136 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5137 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5138 else
5139 warning (0, "unsupported argument to %<__builtin_return_address%>");
5140 return const0_rtx;
5143 /* For __builtin_frame_address, return what we've got. */
5144 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5145 return tem;
5147 if (!REG_P (tem)
5148 && ! CONSTANT_P (tem))
5149 tem = copy_to_mode_reg (Pmode, tem);
5150 return tem;
5154 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5155 we failed and the caller should emit a normal call, otherwise try to get
5156 the result in TARGET, if convenient. */
5158 static rtx
5159 expand_builtin_alloca (tree exp, rtx target)
5161 rtx op0;
5162 rtx result;
5164 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5165 should always expand to function calls. These can be intercepted
5166 in libmudflap. */
5167 if (flag_mudflap)
5168 return NULL_RTX;
5170 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5171 return NULL_RTX;
5173 /* Compute the argument. */
5174 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5176 /* Allocate the desired space. */
5177 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5178 result = convert_memory_address (ptr_mode, result);
5180 return result;
5183 /* Expand a call to a bswap builtin with argument ARG0. MODE
5184 is the mode to expand with. */
5186 static rtx
5187 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5189 enum machine_mode mode;
5190 tree arg;
5191 rtx op0;
5193 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5194 return NULL_RTX;
5196 arg = CALL_EXPR_ARG (exp, 0);
5197 mode = TYPE_MODE (TREE_TYPE (arg));
5198 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5200 target = expand_unop (mode, bswap_optab, op0, target, 1);
5202 gcc_assert (target);
5204 return convert_to_mode (mode, target, 0);
5207 /* Expand a call to a unary builtin in EXP.
5208 Return NULL_RTX if a normal call should be emitted rather than expanding the
5209 function in-line. If convenient, the result should be placed in TARGET.
5210 SUBTARGET may be used as the target for computing one of EXP's operands. */
5212 static rtx
5213 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5214 rtx subtarget, optab op_optab)
5216 rtx op0;
5218 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5219 return NULL_RTX;
5221 /* Compute the argument. */
5222 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5223 VOIDmode, EXPAND_NORMAL);
5224 /* Compute op, into TARGET if possible.
5225 Set TARGET to wherever the result comes back. */
5226 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5227 op_optab, op0, target, 1);
5228 gcc_assert (target);
5230 return convert_to_mode (target_mode, target, 0);
5233 /* If the string passed to fputs is a constant and is one character
5234 long, we attempt to transform this call into __builtin_fputc(). */
5236 static rtx
5237 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5239 /* Verify the arguments in the original call. */
5240 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5242 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5243 CALL_EXPR_ARG (exp, 1),
5244 (target == const0_rtx),
5245 unlocked, NULL_TREE);
5246 if (result)
5247 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5249 return NULL_RTX;
5252 /* Expand a call to __builtin_expect. We just return our argument
5253 as the builtin_expect semantic should've been already executed by
5254 tree branch prediction pass. */
5256 static rtx
5257 expand_builtin_expect (tree exp, rtx target)
5259 tree arg, c;
5261 if (call_expr_nargs (exp) < 2)
5262 return const0_rtx;
5263 arg = CALL_EXPR_ARG (exp, 0);
5264 c = CALL_EXPR_ARG (exp, 1);
5266 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5267 /* When guessing was done, the hints should be already stripped away. */
5268 gcc_assert (!flag_guess_branch_prob
5269 || optimize == 0 || errorcount || sorrycount);
5270 return target;
5273 void
5274 expand_builtin_trap (void)
5276 #ifdef HAVE_trap
5277 if (HAVE_trap)
5278 emit_insn (gen_trap ());
5279 else
5280 #endif
5281 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5282 emit_barrier ();
5285 /* Expand EXP, a call to fabs, fabsf or fabsl.
5286 Return NULL_RTX if a normal call should be emitted rather than expanding
5287 the function inline. If convenient, the result should be placed
5288 in TARGET. SUBTARGET may be used as the target for computing
5289 the operand. */
5291 static rtx
5292 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5294 enum machine_mode mode;
5295 tree arg;
5296 rtx op0;
5298 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5299 return NULL_RTX;
5301 arg = CALL_EXPR_ARG (exp, 0);
5302 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5303 mode = TYPE_MODE (TREE_TYPE (arg));
5304 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5305 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5308 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5309 Return NULL is a normal call should be emitted rather than expanding the
5310 function inline. If convenient, the result should be placed in TARGET.
5311 SUBTARGET may be used as the target for computing the operand. */
5313 static rtx
5314 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5316 rtx op0, op1;
5317 tree arg;
5319 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5320 return NULL_RTX;
5322 arg = CALL_EXPR_ARG (exp, 0);
5323 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5325 arg = CALL_EXPR_ARG (exp, 1);
5326 op1 = expand_normal (arg);
5328 return expand_copysign (op0, op1, target);
5331 /* Create a new constant string literal and return a char* pointer to it.
5332 The STRING_CST value is the LEN characters at STR. */
5333 tree
5334 build_string_literal (int len, const char *str)
5336 tree t, elem, index, type;
5338 t = build_string (len, str);
5339 elem = build_type_variant (char_type_node, 1, 0);
5340 index = build_index_type (size_int (len - 1));
5341 type = build_array_type (elem, index);
5342 TREE_TYPE (t) = type;
5343 TREE_CONSTANT (t) = 1;
5344 TREE_READONLY (t) = 1;
5345 TREE_STATIC (t) = 1;
5347 type = build_pointer_type (elem);
5348 t = build1 (ADDR_EXPR, type,
5349 build4 (ARRAY_REF, elem,
5350 t, integer_zero_node, NULL_TREE, NULL_TREE));
5351 return t;
5354 /* Expand EXP, a call to printf or printf_unlocked.
5355 Return NULL_RTX if a normal call should be emitted rather than transforming
5356 the function inline. If convenient, the result should be placed in
5357 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5358 call. */
5359 static rtx
5360 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5361 bool unlocked)
5363 /* If we're using an unlocked function, assume the other unlocked
5364 functions exist explicitly. */
5365 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5366 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5367 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5368 : implicit_built_in_decls[BUILT_IN_PUTS];
5369 const char *fmt_str;
5370 tree fn = 0;
5371 tree fmt, arg;
5372 int nargs = call_expr_nargs (exp);
5374 /* If the return value is used, don't do the transformation. */
5375 if (target != const0_rtx)
5376 return NULL_RTX;
5378 /* Verify the required arguments in the original call. */
5379 if (nargs == 0)
5380 return NULL_RTX;
5381 fmt = CALL_EXPR_ARG (exp, 0);
5382 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5383 return NULL_RTX;
5385 /* Check whether the format is a literal string constant. */
5386 fmt_str = c_getstr (fmt);
5387 if (fmt_str == NULL)
5388 return NULL_RTX;
5390 if (!init_target_chars ())
5391 return NULL_RTX;
5393 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5394 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5396 if ((nargs != 2)
5397 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5398 return NULL_RTX;
5399 if (fn_puts)
5400 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5402 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5403 else if (strcmp (fmt_str, target_percent_c) == 0)
5405 if ((nargs != 2)
5406 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5407 return NULL_RTX;
5408 if (fn_putchar)
5409 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5411 else
5413 /* We can't handle anything else with % args or %% ... yet. */
5414 if (strchr (fmt_str, target_percent))
5415 return NULL_RTX;
5417 if (nargs > 1)
5418 return NULL_RTX;
5420 /* If the format specifier was "", printf does nothing. */
5421 if (fmt_str[0] == '\0')
5422 return const0_rtx;
5423 /* If the format specifier has length of 1, call putchar. */
5424 if (fmt_str[1] == '\0')
5426 /* Given printf("c"), (where c is any one character,)
5427 convert "c"[0] to an int and pass that to the replacement
5428 function. */
5429 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5430 if (fn_putchar)
5431 fn = build_call_expr (fn_putchar, 1, arg);
5433 else
5435 /* If the format specifier was "string\n", call puts("string"). */
5436 size_t len = strlen (fmt_str);
5437 if ((unsigned char)fmt_str[len - 1] == target_newline)
5439 /* Create a NUL-terminated string that's one char shorter
5440 than the original, stripping off the trailing '\n'. */
5441 char *newstr = XALLOCAVEC (char, len);
5442 memcpy (newstr, fmt_str, len - 1);
5443 newstr[len - 1] = 0;
5444 arg = build_string_literal (len, newstr);
5445 if (fn_puts)
5446 fn = build_call_expr (fn_puts, 1, arg);
5448 else
5449 /* We'd like to arrange to call fputs(string,stdout) here,
5450 but we need stdout and don't have a way to get it yet. */
5451 return NULL_RTX;
5455 if (!fn)
5456 return NULL_RTX;
5457 if (TREE_CODE (fn) == CALL_EXPR)
5458 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5459 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5462 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5463 Return NULL_RTX if a normal call should be emitted rather than transforming
5464 the function inline. If convenient, the result should be placed in
5465 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5466 call. */
5467 static rtx
5468 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5469 bool unlocked)
5471 /* If we're using an unlocked function, assume the other unlocked
5472 functions exist explicitly. */
5473 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5474 : implicit_built_in_decls[BUILT_IN_FPUTC];
5475 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5476 : implicit_built_in_decls[BUILT_IN_FPUTS];
5477 const char *fmt_str;
5478 tree fn = 0;
5479 tree fmt, fp, arg;
5480 int nargs = call_expr_nargs (exp);
5482 /* If the return value is used, don't do the transformation. */
5483 if (target != const0_rtx)
5484 return NULL_RTX;
5486 /* Verify the required arguments in the original call. */
5487 if (nargs < 2)
5488 return NULL_RTX;
5489 fp = CALL_EXPR_ARG (exp, 0);
5490 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5491 return NULL_RTX;
5492 fmt = CALL_EXPR_ARG (exp, 1);
5493 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5494 return NULL_RTX;
5496 /* Check whether the format is a literal string constant. */
5497 fmt_str = c_getstr (fmt);
5498 if (fmt_str == NULL)
5499 return NULL_RTX;
5501 if (!init_target_chars ())
5502 return NULL_RTX;
5504 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5505 if (strcmp (fmt_str, target_percent_s) == 0)
5507 if ((nargs != 3)
5508 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5509 return NULL_RTX;
5510 arg = CALL_EXPR_ARG (exp, 2);
5511 if (fn_fputs)
5512 fn = build_call_expr (fn_fputs, 2, arg, fp);
5514 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5515 else if (strcmp (fmt_str, target_percent_c) == 0)
5517 if ((nargs != 3)
5518 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5519 return NULL_RTX;
5520 arg = CALL_EXPR_ARG (exp, 2);
5521 if (fn_fputc)
5522 fn = build_call_expr (fn_fputc, 2, arg, fp);
5524 else
5526 /* We can't handle anything else with % args or %% ... yet. */
5527 if (strchr (fmt_str, target_percent))
5528 return NULL_RTX;
5530 if (nargs > 2)
5531 return NULL_RTX;
5533 /* If the format specifier was "", fprintf does nothing. */
5534 if (fmt_str[0] == '\0')
5536 /* Evaluate and ignore FILE* argument for side-effects. */
5537 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5538 return const0_rtx;
5541 /* When "string" doesn't contain %, replace all cases of
5542 fprintf(stream,string) with fputs(string,stream). The fputs
5543 builtin will take care of special cases like length == 1. */
5544 if (fn_fputs)
5545 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5548 if (!fn)
5549 return NULL_RTX;
5550 if (TREE_CODE (fn) == CALL_EXPR)
5551 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5552 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5555 /* Expand a call EXP to sprintf. Return NULL_RTX if
5556 a normal call should be emitted rather than expanding the function
5557 inline. If convenient, the result should be placed in TARGET with
5558 mode MODE. */
5560 static rtx
5561 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5563 tree dest, fmt;
5564 const char *fmt_str;
5565 int nargs = call_expr_nargs (exp);
5567 /* Verify the required arguments in the original call. */
5568 if (nargs < 2)
5569 return NULL_RTX;
5570 dest = CALL_EXPR_ARG (exp, 0);
5571 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5572 return NULL_RTX;
5573 fmt = CALL_EXPR_ARG (exp, 0);
5574 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5575 return NULL_RTX;
5577 /* Check whether the format is a literal string constant. */
5578 fmt_str = c_getstr (fmt);
5579 if (fmt_str == NULL)
5580 return NULL_RTX;
5582 if (!init_target_chars ())
5583 return NULL_RTX;
5585 /* If the format doesn't contain % args or %%, use strcpy. */
5586 if (strchr (fmt_str, target_percent) == 0)
5588 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5589 tree exp;
5591 if ((nargs > 2) || ! fn)
5592 return NULL_RTX;
5593 expand_expr (build_call_expr (fn, 2, dest, fmt),
5594 const0_rtx, VOIDmode, EXPAND_NORMAL);
5595 if (target == const0_rtx)
5596 return const0_rtx;
5597 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5598 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5600 /* If the format is "%s", use strcpy if the result isn't used. */
5601 else if (strcmp (fmt_str, target_percent_s) == 0)
5603 tree fn, arg, len;
5604 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5606 if (! fn)
5607 return NULL_RTX;
5608 if (nargs != 3)
5609 return NULL_RTX;
5610 arg = CALL_EXPR_ARG (exp, 2);
5611 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5612 return NULL_RTX;
5614 if (target != const0_rtx)
5616 len = c_strlen (arg, 1);
5617 if (! len || TREE_CODE (len) != INTEGER_CST)
5618 return NULL_RTX;
5620 else
5621 len = NULL_TREE;
5623 expand_expr (build_call_expr (fn, 2, dest, arg),
5624 const0_rtx, VOIDmode, EXPAND_NORMAL);
5626 if (target == const0_rtx)
5627 return const0_rtx;
5628 return expand_expr (len, target, mode, EXPAND_NORMAL);
5631 return NULL_RTX;
5634 /* Expand a call to either the entry or exit function profiler. */
5636 static rtx
5637 expand_builtin_profile_func (bool exitp)
5639 rtx this_rtx, which;
5641 this_rtx = DECL_RTL (current_function_decl);
5642 gcc_assert (MEM_P (this_rtx));
5643 this_rtx = XEXP (this_rtx, 0);
5645 if (exitp)
5646 which = profile_function_exit_libfunc;
5647 else
5648 which = profile_function_entry_libfunc;
5650 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5651 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5653 Pmode);
5655 return const0_rtx;
5658 /* Expand a call to __builtin___clear_cache. */
5660 static rtx
5661 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5663 #ifndef HAVE_clear_cache
5664 #ifdef CLEAR_INSN_CACHE
5665 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5666 does something. Just do the default expansion to a call to
5667 __clear_cache(). */
5668 return NULL_RTX;
5669 #else
5670 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5671 does nothing. There is no need to call it. Do nothing. */
5672 return const0_rtx;
5673 #endif /* CLEAR_INSN_CACHE */
5674 #else
5675 /* We have a "clear_cache" insn, and it will handle everything. */
5676 tree begin, end;
5677 rtx begin_rtx, end_rtx;
5678 enum insn_code icode;
5680 /* We must not expand to a library call. If we did, any
5681 fallback library function in libgcc that might contain a call to
5682 __builtin___clear_cache() would recurse infinitely. */
5683 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5685 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5686 return const0_rtx;
5689 if (HAVE_clear_cache)
5691 icode = CODE_FOR_clear_cache;
5693 begin = CALL_EXPR_ARG (exp, 0);
5694 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5695 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5696 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5697 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5699 end = CALL_EXPR_ARG (exp, 1);
5700 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5701 end_rtx = convert_memory_address (Pmode, end_rtx);
5702 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5703 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5705 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5707 return const0_rtx;
5708 #endif /* HAVE_clear_cache */
5711 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5713 static rtx
5714 round_trampoline_addr (rtx tramp)
5716 rtx temp, addend, mask;
5718 /* If we don't need too much alignment, we'll have been guaranteed
5719 proper alignment by get_trampoline_type. */
5720 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5721 return tramp;
5723 /* Round address up to desired boundary. */
5724 temp = gen_reg_rtx (Pmode);
5725 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5726 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5728 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5729 temp, 0, OPTAB_LIB_WIDEN);
5730 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5731 temp, 0, OPTAB_LIB_WIDEN);
5733 return tramp;
5736 static rtx
5737 expand_builtin_init_trampoline (tree exp)
5739 tree t_tramp, t_func, t_chain;
5740 rtx r_tramp, r_func, r_chain;
5741 #ifdef TRAMPOLINE_TEMPLATE
5742 rtx blktramp;
5743 #endif
5745 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5746 POINTER_TYPE, VOID_TYPE))
5747 return NULL_RTX;
5749 t_tramp = CALL_EXPR_ARG (exp, 0);
5750 t_func = CALL_EXPR_ARG (exp, 1);
5751 t_chain = CALL_EXPR_ARG (exp, 2);
5753 r_tramp = expand_normal (t_tramp);
5754 r_func = expand_normal (t_func);
5755 r_chain = expand_normal (t_chain);
5757 /* Generate insns to initialize the trampoline. */
5758 r_tramp = round_trampoline_addr (r_tramp);
5759 #ifdef TRAMPOLINE_TEMPLATE
5760 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5761 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5762 emit_block_move (blktramp, assemble_trampoline_template (),
5763 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5764 #endif
5765 trampolines_created = 1;
5766 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5768 return const0_rtx;
5771 static rtx
5772 expand_builtin_adjust_trampoline (tree exp)
5774 rtx tramp;
5776 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5777 return NULL_RTX;
5779 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5780 tramp = round_trampoline_addr (tramp);
5781 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5782 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5783 #endif
5785 return tramp;
5788 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5789 function. The function first checks whether the back end provides
5790 an insn to implement signbit for the respective mode. If not, it
5791 checks whether the floating point format of the value is such that
5792 the sign bit can be extracted. If that is not the case, the
5793 function returns NULL_RTX to indicate that a normal call should be
5794 emitted rather than expanding the function in-line. EXP is the
5795 expression that is a call to the builtin function; if convenient,
5796 the result should be placed in TARGET. */
5797 static rtx
5798 expand_builtin_signbit (tree exp, rtx target)
5800 const struct real_format *fmt;
5801 enum machine_mode fmode, imode, rmode;
5802 HOST_WIDE_INT hi, lo;
5803 tree arg;
5804 int word, bitpos;
5805 enum insn_code icode;
5806 rtx temp;
5808 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5809 return NULL_RTX;
5811 arg = CALL_EXPR_ARG (exp, 0);
5812 fmode = TYPE_MODE (TREE_TYPE (arg));
5813 rmode = TYPE_MODE (TREE_TYPE (exp));
5814 fmt = REAL_MODE_FORMAT (fmode);
5816 arg = builtin_save_expr (arg);
5818 /* Expand the argument yielding a RTX expression. */
5819 temp = expand_normal (arg);
5821 /* Check if the back end provides an insn that handles signbit for the
5822 argument's mode. */
5823 icode = signbit_optab->handlers [(int) fmode].insn_code;
5824 if (icode != CODE_FOR_nothing)
5826 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5827 emit_unop_insn (icode, target, temp, UNKNOWN);
5828 return target;
5831 /* For floating point formats without a sign bit, implement signbit
5832 as "ARG < 0.0". */
5833 bitpos = fmt->signbit_ro;
5834 if (bitpos < 0)
5836 /* But we can't do this if the format supports signed zero. */
5837 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5838 return NULL_RTX;
5840 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5841 build_real (TREE_TYPE (arg), dconst0));
5842 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5845 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5847 imode = int_mode_for_mode (fmode);
5848 if (imode == BLKmode)
5849 return NULL_RTX;
5850 temp = gen_lowpart (imode, temp);
5852 else
5854 imode = word_mode;
5855 /* Handle targets with different FP word orders. */
5856 if (FLOAT_WORDS_BIG_ENDIAN)
5857 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5858 else
5859 word = bitpos / BITS_PER_WORD;
5860 temp = operand_subword_force (temp, word, fmode);
5861 bitpos = bitpos % BITS_PER_WORD;
5864 /* Force the intermediate word_mode (or narrower) result into a
5865 register. This avoids attempting to create paradoxical SUBREGs
5866 of floating point modes below. */
5867 temp = force_reg (imode, temp);
5869 /* If the bitpos is within the "result mode" lowpart, the operation
5870 can be implement with a single bitwise AND. Otherwise, we need
5871 a right shift and an AND. */
5873 if (bitpos < GET_MODE_BITSIZE (rmode))
5875 if (bitpos < HOST_BITS_PER_WIDE_INT)
5877 hi = 0;
5878 lo = (HOST_WIDE_INT) 1 << bitpos;
5880 else
5882 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5883 lo = 0;
5886 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5887 temp = gen_lowpart (rmode, temp);
5888 temp = expand_binop (rmode, and_optab, temp,
5889 immed_double_const (lo, hi, rmode),
5890 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5892 else
5894 /* Perform a logical right shift to place the signbit in the least
5895 significant bit, then truncate the result to the desired mode
5896 and mask just this bit. */
5897 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5898 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5899 temp = gen_lowpart (rmode, temp);
5900 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5901 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5904 return temp;
5907 /* Expand fork or exec calls. TARGET is the desired target of the
5908 call. EXP is the call. FN is the
5909 identificator of the actual function. IGNORE is nonzero if the
5910 value is to be ignored. */
5912 static rtx
5913 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5915 tree id, decl;
5916 tree call;
5918 /* If we are not profiling, just call the function. */
5919 if (!profile_arc_flag)
5920 return NULL_RTX;
5922 /* Otherwise call the wrapper. This should be equivalent for the rest of
5923 compiler, so the code does not diverge, and the wrapper may run the
5924 code necessary for keeping the profiling sane. */
5926 switch (DECL_FUNCTION_CODE (fn))
5928 case BUILT_IN_FORK:
5929 id = get_identifier ("__gcov_fork");
5930 break;
5932 case BUILT_IN_EXECL:
5933 id = get_identifier ("__gcov_execl");
5934 break;
5936 case BUILT_IN_EXECV:
5937 id = get_identifier ("__gcov_execv");
5938 break;
5940 case BUILT_IN_EXECLP:
5941 id = get_identifier ("__gcov_execlp");
5942 break;
5944 case BUILT_IN_EXECLE:
5945 id = get_identifier ("__gcov_execle");
5946 break;
5948 case BUILT_IN_EXECVP:
5949 id = get_identifier ("__gcov_execvp");
5950 break;
5952 case BUILT_IN_EXECVE:
5953 id = get_identifier ("__gcov_execve");
5954 break;
5956 default:
5957 gcc_unreachable ();
5960 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5961 DECL_EXTERNAL (decl) = 1;
5962 TREE_PUBLIC (decl) = 1;
5963 DECL_ARTIFICIAL (decl) = 1;
5964 TREE_NOTHROW (decl) = 1;
5965 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5966 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5967 call = rewrite_call_expr (exp, 0, decl, 0);
5968 return expand_call (call, target, ignore);
5973 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5974 the pointer in these functions is void*, the tree optimizers may remove
5975 casts. The mode computed in expand_builtin isn't reliable either, due
5976 to __sync_bool_compare_and_swap.
5978 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5979 group of builtins. This gives us log2 of the mode size. */
5981 static inline enum machine_mode
5982 get_builtin_sync_mode (int fcode_diff)
5984 /* The size is not negotiable, so ask not to get BLKmode in return
5985 if the target indicates that a smaller size would be better. */
5986 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5989 /* Expand the memory expression LOC and return the appropriate memory operand
5990 for the builtin_sync operations. */
5992 static rtx
5993 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5995 rtx addr, mem;
5997 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5999 /* Note that we explicitly do not want any alias information for this
6000 memory, so that we kill all other live memories. Otherwise we don't
6001 satisfy the full barrier semantics of the intrinsic. */
6002 mem = validize_mem (gen_rtx_MEM (mode, addr));
6004 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6005 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6006 MEM_VOLATILE_P (mem) = 1;
6008 return mem;
6011 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6012 EXP is the CALL_EXPR. CODE is the rtx code
6013 that corresponds to the arithmetic or logical operation from the name;
6014 an exception here is that NOT actually means NAND. TARGET is an optional
6015 place for us to store the results; AFTER is true if this is the
6016 fetch_and_xxx form. IGNORE is true if we don't actually care about
6017 the result of the operation at all. */
6019 static rtx
6020 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6021 enum rtx_code code, bool after,
6022 rtx target, bool ignore)
6024 rtx val, mem;
6025 enum machine_mode old_mode;
6027 if (code == NOT && warn_sync_nand)
6029 tree fndecl = get_callee_fndecl (exp);
6030 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6032 static bool warned_f_a_n, warned_n_a_f;
6034 switch (fcode)
6036 case BUILT_IN_FETCH_AND_NAND_1:
6037 case BUILT_IN_FETCH_AND_NAND_2:
6038 case BUILT_IN_FETCH_AND_NAND_4:
6039 case BUILT_IN_FETCH_AND_NAND_8:
6040 case BUILT_IN_FETCH_AND_NAND_16:
6042 if (warned_f_a_n)
6043 break;
6045 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6046 inform (input_location,
6047 "%qD changed semantics in GCC 4.4", fndecl);
6048 warned_f_a_n = true;
6049 break;
6051 case BUILT_IN_NAND_AND_FETCH_1:
6052 case BUILT_IN_NAND_AND_FETCH_2:
6053 case BUILT_IN_NAND_AND_FETCH_4:
6054 case BUILT_IN_NAND_AND_FETCH_8:
6055 case BUILT_IN_NAND_AND_FETCH_16:
6057 if (warned_n_a_f)
6058 break;
6060 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6061 inform (input_location,
6062 "%qD changed semantics in GCC 4.4", fndecl);
6063 warned_n_a_f = true;
6064 break;
6066 default:
6067 gcc_unreachable ();
6071 /* Expand the operands. */
6072 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6074 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6075 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6076 of CONST_INTs, where we know the old_mode only from the call argument. */
6077 old_mode = GET_MODE (val);
6078 if (old_mode == VOIDmode)
6079 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6080 val = convert_modes (mode, old_mode, val, 1);
6082 if (ignore)
6083 return expand_sync_operation (mem, val, code);
6084 else
6085 return expand_sync_fetch_operation (mem, val, code, after, target);
6088 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6089 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6090 true if this is the boolean form. TARGET is a place for us to store the
6091 results; this is NOT optional if IS_BOOL is true. */
6093 static rtx
6094 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6095 bool is_bool, rtx target)
6097 rtx old_val, new_val, mem;
6098 enum machine_mode old_mode;
6100 /* Expand the operands. */
6101 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6104 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6105 mode, EXPAND_NORMAL);
6106 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6107 of CONST_INTs, where we know the old_mode only from the call argument. */
6108 old_mode = GET_MODE (old_val);
6109 if (old_mode == VOIDmode)
6110 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6111 old_val = convert_modes (mode, old_mode, old_val, 1);
6113 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6114 mode, EXPAND_NORMAL);
6115 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6116 of CONST_INTs, where we know the old_mode only from the call argument. */
6117 old_mode = GET_MODE (new_val);
6118 if (old_mode == VOIDmode)
6119 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6120 new_val = convert_modes (mode, old_mode, new_val, 1);
6122 if (is_bool)
6123 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6124 else
6125 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6128 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6129 general form is actually an atomic exchange, and some targets only
6130 support a reduced form with the second argument being a constant 1.
6131 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6132 the results. */
6134 static rtx
6135 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6136 rtx target)
6138 rtx val, mem;
6139 enum machine_mode old_mode;
6141 /* Expand the operands. */
6142 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6143 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6144 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6145 of CONST_INTs, where we know the old_mode only from the call argument. */
6146 old_mode = GET_MODE (val);
6147 if (old_mode == VOIDmode)
6148 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6149 val = convert_modes (mode, old_mode, val, 1);
6151 return expand_sync_lock_test_and_set (mem, val, target);
6154 /* Expand the __sync_synchronize intrinsic. */
6156 static void
6157 expand_builtin_synchronize (void)
6159 tree x;
6161 #ifdef HAVE_memory_barrier
6162 if (HAVE_memory_barrier)
6164 emit_insn (gen_memory_barrier ());
6165 return;
6167 #endif
6169 if (synchronize_libfunc != NULL_RTX)
6171 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6172 return;
6175 /* If no explicit memory barrier instruction is available, create an
6176 empty asm stmt with a memory clobber. */
6177 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6178 tree_cons (NULL, build_string (6, "memory"), NULL));
6179 ASM_VOLATILE_P (x) = 1;
6180 expand_asm_expr (x);
6183 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6185 static void
6186 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6188 enum insn_code icode;
6189 rtx mem, insn;
6190 rtx val = const0_rtx;
6192 /* Expand the operands. */
6193 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6195 /* If there is an explicit operation in the md file, use it. */
6196 icode = sync_lock_release[mode];
6197 if (icode != CODE_FOR_nothing)
6199 if (!insn_data[icode].operand[1].predicate (val, mode))
6200 val = force_reg (mode, val);
6202 insn = GEN_FCN (icode) (mem, val);
6203 if (insn)
6205 emit_insn (insn);
6206 return;
6210 /* Otherwise we can implement this operation by emitting a barrier
6211 followed by a store of zero. */
6212 expand_builtin_synchronize ();
6213 emit_move_insn (mem, val);
6216 /* Expand an expression EXP that calls a built-in function,
6217 with result going to TARGET if that's convenient
6218 (and in mode MODE if that's convenient).
6219 SUBTARGET may be used as the target for computing one of EXP's operands.
6220 IGNORE is nonzero if the value is to be ignored. */
6223 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6224 int ignore)
6226 tree fndecl = get_callee_fndecl (exp);
6227 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6228 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6230 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6231 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6233 /* When not optimizing, generate calls to library functions for a certain
6234 set of builtins. */
6235 if (!optimize
6236 && !called_as_built_in (fndecl)
6237 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6238 && fcode != BUILT_IN_ALLOCA
6239 && fcode != BUILT_IN_FREE)
6240 return expand_call (exp, target, ignore);
6242 /* The built-in function expanders test for target == const0_rtx
6243 to determine whether the function's result will be ignored. */
6244 if (ignore)
6245 target = const0_rtx;
6247 /* If the result of a pure or const built-in function is ignored, and
6248 none of its arguments are volatile, we can avoid expanding the
6249 built-in call and just evaluate the arguments for side-effects. */
6250 if (target == const0_rtx
6251 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6253 bool volatilep = false;
6254 tree arg;
6255 call_expr_arg_iterator iter;
6257 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6258 if (TREE_THIS_VOLATILE (arg))
6260 volatilep = true;
6261 break;
6264 if (! volatilep)
6266 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6267 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6268 return const0_rtx;
6272 switch (fcode)
6274 CASE_FLT_FN (BUILT_IN_FABS):
6275 target = expand_builtin_fabs (exp, target, subtarget);
6276 if (target)
6277 return target;
6278 break;
6280 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6281 target = expand_builtin_copysign (exp, target, subtarget);
6282 if (target)
6283 return target;
6284 break;
6286 /* Just do a normal library call if we were unable to fold
6287 the values. */
6288 CASE_FLT_FN (BUILT_IN_CABS):
6289 break;
6291 CASE_FLT_FN (BUILT_IN_EXP):
6292 CASE_FLT_FN (BUILT_IN_EXP10):
6293 CASE_FLT_FN (BUILT_IN_POW10):
6294 CASE_FLT_FN (BUILT_IN_EXP2):
6295 CASE_FLT_FN (BUILT_IN_EXPM1):
6296 CASE_FLT_FN (BUILT_IN_LOGB):
6297 CASE_FLT_FN (BUILT_IN_LOG):
6298 CASE_FLT_FN (BUILT_IN_LOG10):
6299 CASE_FLT_FN (BUILT_IN_LOG2):
6300 CASE_FLT_FN (BUILT_IN_LOG1P):
6301 CASE_FLT_FN (BUILT_IN_TAN):
6302 CASE_FLT_FN (BUILT_IN_ASIN):
6303 CASE_FLT_FN (BUILT_IN_ACOS):
6304 CASE_FLT_FN (BUILT_IN_ATAN):
6305 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6306 because of possible accuracy problems. */
6307 if (! flag_unsafe_math_optimizations)
6308 break;
6309 CASE_FLT_FN (BUILT_IN_SQRT):
6310 CASE_FLT_FN (BUILT_IN_FLOOR):
6311 CASE_FLT_FN (BUILT_IN_CEIL):
6312 CASE_FLT_FN (BUILT_IN_TRUNC):
6313 CASE_FLT_FN (BUILT_IN_ROUND):
6314 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6315 CASE_FLT_FN (BUILT_IN_RINT):
6316 target = expand_builtin_mathfn (exp, target, subtarget);
6317 if (target)
6318 return target;
6319 break;
6321 CASE_FLT_FN (BUILT_IN_ILOGB):
6322 if (! flag_unsafe_math_optimizations)
6323 break;
6324 CASE_FLT_FN (BUILT_IN_ISINF):
6325 CASE_FLT_FN (BUILT_IN_FINITE):
6326 case BUILT_IN_ISFINITE:
6327 case BUILT_IN_ISNORMAL:
6328 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6329 if (target)
6330 return target;
6331 break;
6333 CASE_FLT_FN (BUILT_IN_LCEIL):
6334 CASE_FLT_FN (BUILT_IN_LLCEIL):
6335 CASE_FLT_FN (BUILT_IN_LFLOOR):
6336 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6337 target = expand_builtin_int_roundingfn (exp, target);
6338 if (target)
6339 return target;
6340 break;
6342 CASE_FLT_FN (BUILT_IN_LRINT):
6343 CASE_FLT_FN (BUILT_IN_LLRINT):
6344 CASE_FLT_FN (BUILT_IN_LROUND):
6345 CASE_FLT_FN (BUILT_IN_LLROUND):
6346 target = expand_builtin_int_roundingfn_2 (exp, target);
6347 if (target)
6348 return target;
6349 break;
6351 CASE_FLT_FN (BUILT_IN_POW):
6352 target = expand_builtin_pow (exp, target, subtarget);
6353 if (target)
6354 return target;
6355 break;
6357 CASE_FLT_FN (BUILT_IN_POWI):
6358 target = expand_builtin_powi (exp, target, subtarget);
6359 if (target)
6360 return target;
6361 break;
6363 CASE_FLT_FN (BUILT_IN_ATAN2):
6364 CASE_FLT_FN (BUILT_IN_LDEXP):
6365 CASE_FLT_FN (BUILT_IN_SCALB):
6366 CASE_FLT_FN (BUILT_IN_SCALBN):
6367 CASE_FLT_FN (BUILT_IN_SCALBLN):
6368 if (! flag_unsafe_math_optimizations)
6369 break;
6371 CASE_FLT_FN (BUILT_IN_FMOD):
6372 CASE_FLT_FN (BUILT_IN_REMAINDER):
6373 CASE_FLT_FN (BUILT_IN_DREM):
6374 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6375 if (target)
6376 return target;
6377 break;
6379 CASE_FLT_FN (BUILT_IN_CEXPI):
6380 target = expand_builtin_cexpi (exp, target, subtarget);
6381 gcc_assert (target);
6382 return target;
6384 CASE_FLT_FN (BUILT_IN_SIN):
6385 CASE_FLT_FN (BUILT_IN_COS):
6386 if (! flag_unsafe_math_optimizations)
6387 break;
6388 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6389 if (target)
6390 return target;
6391 break;
6393 CASE_FLT_FN (BUILT_IN_SINCOS):
6394 if (! flag_unsafe_math_optimizations)
6395 break;
6396 target = expand_builtin_sincos (exp);
6397 if (target)
6398 return target;
6399 break;
6401 case BUILT_IN_APPLY_ARGS:
6402 return expand_builtin_apply_args ();
6404 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6405 FUNCTION with a copy of the parameters described by
6406 ARGUMENTS, and ARGSIZE. It returns a block of memory
6407 allocated on the stack into which is stored all the registers
6408 that might possibly be used for returning the result of a
6409 function. ARGUMENTS is the value returned by
6410 __builtin_apply_args. ARGSIZE is the number of bytes of
6411 arguments that must be copied. ??? How should this value be
6412 computed? We'll also need a safe worst case value for varargs
6413 functions. */
6414 case BUILT_IN_APPLY:
6415 if (!validate_arglist (exp, POINTER_TYPE,
6416 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6417 && !validate_arglist (exp, REFERENCE_TYPE,
6418 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6419 return const0_rtx;
6420 else
6422 rtx ops[3];
6424 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6425 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6426 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6428 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6431 /* __builtin_return (RESULT) causes the function to return the
6432 value described by RESULT. RESULT is address of the block of
6433 memory returned by __builtin_apply. */
6434 case BUILT_IN_RETURN:
6435 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6436 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6437 return const0_rtx;
6439 case BUILT_IN_SAVEREGS:
6440 return expand_builtin_saveregs ();
6442 case BUILT_IN_ARGS_INFO:
6443 return expand_builtin_args_info (exp);
6445 case BUILT_IN_VA_ARG_PACK:
6446 /* All valid uses of __builtin_va_arg_pack () are removed during
6447 inlining. */
6448 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6449 return const0_rtx;
6451 case BUILT_IN_VA_ARG_PACK_LEN:
6452 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6453 inlining. */
6454 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6455 return const0_rtx;
6457 /* Return the address of the first anonymous stack arg. */
6458 case BUILT_IN_NEXT_ARG:
6459 if (fold_builtin_next_arg (exp, false))
6460 return const0_rtx;
6461 return expand_builtin_next_arg ();
6463 case BUILT_IN_CLEAR_CACHE:
6464 target = expand_builtin___clear_cache (exp);
6465 if (target)
6466 return target;
6467 break;
6469 case BUILT_IN_CLASSIFY_TYPE:
6470 return expand_builtin_classify_type (exp);
6472 case BUILT_IN_CONSTANT_P:
6473 return const0_rtx;
6475 case BUILT_IN_FRAME_ADDRESS:
6476 case BUILT_IN_RETURN_ADDRESS:
6477 return expand_builtin_frame_address (fndecl, exp);
6479 /* Returns the address of the area where the structure is returned.
6480 0 otherwise. */
6481 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6482 if (call_expr_nargs (exp) != 0
6483 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6484 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6485 return const0_rtx;
6486 else
6487 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6489 case BUILT_IN_ALLOCA:
6490 target = expand_builtin_alloca (exp, target);
6491 if (target)
6492 return target;
6493 break;
6495 case BUILT_IN_STACK_SAVE:
6496 return expand_stack_save ();
6498 case BUILT_IN_STACK_RESTORE:
6499 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6500 return const0_rtx;
6502 case BUILT_IN_BSWAP32:
6503 case BUILT_IN_BSWAP64:
6504 target = expand_builtin_bswap (exp, target, subtarget);
6506 if (target)
6507 return target;
6508 break;
6510 CASE_INT_FN (BUILT_IN_FFS):
6511 case BUILT_IN_FFSIMAX:
6512 target = expand_builtin_unop (target_mode, exp, target,
6513 subtarget, ffs_optab);
6514 if (target)
6515 return target;
6516 break;
6518 CASE_INT_FN (BUILT_IN_CLZ):
6519 case BUILT_IN_CLZIMAX:
6520 target = expand_builtin_unop (target_mode, exp, target,
6521 subtarget, clz_optab);
6522 if (target)
6523 return target;
6524 break;
6526 CASE_INT_FN (BUILT_IN_CTZ):
6527 case BUILT_IN_CTZIMAX:
6528 target = expand_builtin_unop (target_mode, exp, target,
6529 subtarget, ctz_optab);
6530 if (target)
6531 return target;
6532 break;
6534 CASE_INT_FN (BUILT_IN_POPCOUNT):
6535 case BUILT_IN_POPCOUNTIMAX:
6536 target = expand_builtin_unop (target_mode, exp, target,
6537 subtarget, popcount_optab);
6538 if (target)
6539 return target;
6540 break;
6542 CASE_INT_FN (BUILT_IN_PARITY):
6543 case BUILT_IN_PARITYIMAX:
6544 target = expand_builtin_unop (target_mode, exp, target,
6545 subtarget, parity_optab);
6546 if (target)
6547 return target;
6548 break;
6550 case BUILT_IN_STRLEN:
6551 target = expand_builtin_strlen (exp, target, target_mode);
6552 if (target)
6553 return target;
6554 break;
6556 case BUILT_IN_STRCPY:
6557 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6558 if (target)
6559 return target;
6560 break;
6562 case BUILT_IN_STRNCPY:
6563 target = expand_builtin_strncpy (exp, target, mode);
6564 if (target)
6565 return target;
6566 break;
6568 case BUILT_IN_STPCPY:
6569 target = expand_builtin_stpcpy (exp, target, mode);
6570 if (target)
6571 return target;
6572 break;
6574 case BUILT_IN_STRCAT:
6575 target = expand_builtin_strcat (fndecl, exp, target, mode);
6576 if (target)
6577 return target;
6578 break;
6580 case BUILT_IN_STRNCAT:
6581 target = expand_builtin_strncat (exp, target, mode);
6582 if (target)
6583 return target;
6584 break;
6586 case BUILT_IN_STRSPN:
6587 target = expand_builtin_strspn (exp, target, mode);
6588 if (target)
6589 return target;
6590 break;
6592 case BUILT_IN_STRCSPN:
6593 target = expand_builtin_strcspn (exp, target, mode);
6594 if (target)
6595 return target;
6596 break;
6598 case BUILT_IN_STRSTR:
6599 target = expand_builtin_strstr (exp, target, mode);
6600 if (target)
6601 return target;
6602 break;
6604 case BUILT_IN_STRPBRK:
6605 target = expand_builtin_strpbrk (exp, target, mode);
6606 if (target)
6607 return target;
6608 break;
6610 case BUILT_IN_INDEX:
6611 case BUILT_IN_STRCHR:
6612 target = expand_builtin_strchr (exp, target, mode);
6613 if (target)
6614 return target;
6615 break;
6617 case BUILT_IN_RINDEX:
6618 case BUILT_IN_STRRCHR:
6619 target = expand_builtin_strrchr (exp, target, mode);
6620 if (target)
6621 return target;
6622 break;
6624 case BUILT_IN_MEMCPY:
6625 target = expand_builtin_memcpy (exp, target, mode);
6626 if (target)
6627 return target;
6628 break;
6630 case BUILT_IN_MEMPCPY:
6631 target = expand_builtin_mempcpy (exp, target, mode);
6632 if (target)
6633 return target;
6634 break;
6636 case BUILT_IN_MEMMOVE:
6637 target = expand_builtin_memmove (exp, target, mode, ignore);
6638 if (target)
6639 return target;
6640 break;
6642 case BUILT_IN_BCOPY:
6643 target = expand_builtin_bcopy (exp, ignore);
6644 if (target)
6645 return target;
6646 break;
6648 case BUILT_IN_MEMSET:
6649 target = expand_builtin_memset (exp, target, mode);
6650 if (target)
6651 return target;
6652 break;
6654 case BUILT_IN_BZERO:
6655 target = expand_builtin_bzero (exp);
6656 if (target)
6657 return target;
6658 break;
6660 case BUILT_IN_STRCMP:
6661 target = expand_builtin_strcmp (exp, target, mode);
6662 if (target)
6663 return target;
6664 break;
6666 case BUILT_IN_STRNCMP:
6667 target = expand_builtin_strncmp (exp, target, mode);
6668 if (target)
6669 return target;
6670 break;
6672 case BUILT_IN_MEMCHR:
6673 target = expand_builtin_memchr (exp, target, mode);
6674 if (target)
6675 return target;
6676 break;
6678 case BUILT_IN_BCMP:
6679 case BUILT_IN_MEMCMP:
6680 target = expand_builtin_memcmp (exp, target, mode);
6681 if (target)
6682 return target;
6683 break;
6685 case BUILT_IN_SETJMP:
6686 /* This should have been lowered to the builtins below. */
6687 gcc_unreachable ();
6689 case BUILT_IN_SETJMP_SETUP:
6690 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6691 and the receiver label. */
6692 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6694 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6695 VOIDmode, EXPAND_NORMAL);
6696 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6697 rtx label_r = label_rtx (label);
6699 /* This is copied from the handling of non-local gotos. */
6700 expand_builtin_setjmp_setup (buf_addr, label_r);
6701 nonlocal_goto_handler_labels
6702 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6703 nonlocal_goto_handler_labels);
6704 /* ??? Do not let expand_label treat us as such since we would
6705 not want to be both on the list of non-local labels and on
6706 the list of forced labels. */
6707 FORCED_LABEL (label) = 0;
6708 return const0_rtx;
6710 break;
6712 case BUILT_IN_SETJMP_DISPATCHER:
6713 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6714 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6716 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6717 rtx label_r = label_rtx (label);
6719 /* Remove the dispatcher label from the list of non-local labels
6720 since the receiver labels have been added to it above. */
6721 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6722 return const0_rtx;
6724 break;
6726 case BUILT_IN_SETJMP_RECEIVER:
6727 /* __builtin_setjmp_receiver is passed the receiver label. */
6728 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6730 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6731 rtx label_r = label_rtx (label);
6733 expand_builtin_setjmp_receiver (label_r);
6734 return const0_rtx;
6736 break;
6738 /* __builtin_longjmp is passed a pointer to an array of five words.
6739 It's similar to the C library longjmp function but works with
6740 __builtin_setjmp above. */
6741 case BUILT_IN_LONGJMP:
6742 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6744 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6745 VOIDmode, EXPAND_NORMAL);
6746 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6748 if (value != const1_rtx)
6750 error ("%<__builtin_longjmp%> second argument must be 1");
6751 return const0_rtx;
6754 expand_builtin_longjmp (buf_addr, value);
6755 return const0_rtx;
6757 break;
6759 case BUILT_IN_NONLOCAL_GOTO:
6760 target = expand_builtin_nonlocal_goto (exp);
6761 if (target)
6762 return target;
6763 break;
6765 /* This updates the setjmp buffer that is its argument with the value
6766 of the current stack pointer. */
6767 case BUILT_IN_UPDATE_SETJMP_BUF:
6768 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6770 rtx buf_addr
6771 = expand_normal (CALL_EXPR_ARG (exp, 0));
6773 expand_builtin_update_setjmp_buf (buf_addr);
6774 return const0_rtx;
6776 break;
6778 case BUILT_IN_TRAP:
6779 expand_builtin_trap ();
6780 return const0_rtx;
6782 case BUILT_IN_PRINTF:
6783 target = expand_builtin_printf (exp, target, mode, false);
6784 if (target)
6785 return target;
6786 break;
6788 case BUILT_IN_PRINTF_UNLOCKED:
6789 target = expand_builtin_printf (exp, target, mode, true);
6790 if (target)
6791 return target;
6792 break;
6794 case BUILT_IN_FPUTS:
6795 target = expand_builtin_fputs (exp, target, false);
6796 if (target)
6797 return target;
6798 break;
6799 case BUILT_IN_FPUTS_UNLOCKED:
6800 target = expand_builtin_fputs (exp, target, true);
6801 if (target)
6802 return target;
6803 break;
6805 case BUILT_IN_FPRINTF:
6806 target = expand_builtin_fprintf (exp, target, mode, false);
6807 if (target)
6808 return target;
6809 break;
6811 case BUILT_IN_FPRINTF_UNLOCKED:
6812 target = expand_builtin_fprintf (exp, target, mode, true);
6813 if (target)
6814 return target;
6815 break;
6817 case BUILT_IN_SPRINTF:
6818 target = expand_builtin_sprintf (exp, target, mode);
6819 if (target)
6820 return target;
6821 break;
6823 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6824 case BUILT_IN_SIGNBITD32:
6825 case BUILT_IN_SIGNBITD64:
6826 case BUILT_IN_SIGNBITD128:
6827 target = expand_builtin_signbit (exp, target);
6828 if (target)
6829 return target;
6830 break;
6832 /* Various hooks for the DWARF 2 __throw routine. */
6833 case BUILT_IN_UNWIND_INIT:
6834 expand_builtin_unwind_init ();
6835 return const0_rtx;
6836 case BUILT_IN_DWARF_CFA:
6837 return virtual_cfa_rtx;
6838 #ifdef DWARF2_UNWIND_INFO
6839 case BUILT_IN_DWARF_SP_COLUMN:
6840 return expand_builtin_dwarf_sp_column ();
6841 case BUILT_IN_INIT_DWARF_REG_SIZES:
6842 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6843 return const0_rtx;
6844 #endif
6845 case BUILT_IN_FROB_RETURN_ADDR:
6846 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6847 case BUILT_IN_EXTRACT_RETURN_ADDR:
6848 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6849 case BUILT_IN_EH_RETURN:
6850 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6851 CALL_EXPR_ARG (exp, 1));
6852 return const0_rtx;
6853 #ifdef EH_RETURN_DATA_REGNO
6854 case BUILT_IN_EH_RETURN_DATA_REGNO:
6855 return expand_builtin_eh_return_data_regno (exp);
6856 #endif
6857 case BUILT_IN_EXTEND_POINTER:
6858 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6860 case BUILT_IN_VA_START:
6861 return expand_builtin_va_start (exp);
6862 case BUILT_IN_VA_END:
6863 return expand_builtin_va_end (exp);
6864 case BUILT_IN_VA_COPY:
6865 return expand_builtin_va_copy (exp);
6866 case BUILT_IN_EXPECT:
6867 return expand_builtin_expect (exp, target);
6868 case BUILT_IN_PREFETCH:
6869 expand_builtin_prefetch (exp);
6870 return const0_rtx;
6872 case BUILT_IN_PROFILE_FUNC_ENTER:
6873 return expand_builtin_profile_func (false);
6874 case BUILT_IN_PROFILE_FUNC_EXIT:
6875 return expand_builtin_profile_func (true);
6877 case BUILT_IN_INIT_TRAMPOLINE:
6878 return expand_builtin_init_trampoline (exp);
6879 case BUILT_IN_ADJUST_TRAMPOLINE:
6880 return expand_builtin_adjust_trampoline (exp);
6882 case BUILT_IN_FORK:
6883 case BUILT_IN_EXECL:
6884 case BUILT_IN_EXECV:
6885 case BUILT_IN_EXECLP:
6886 case BUILT_IN_EXECLE:
6887 case BUILT_IN_EXECVP:
6888 case BUILT_IN_EXECVE:
6889 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6890 if (target)
6891 return target;
6892 break;
6894 case BUILT_IN_FETCH_AND_ADD_1:
6895 case BUILT_IN_FETCH_AND_ADD_2:
6896 case BUILT_IN_FETCH_AND_ADD_4:
6897 case BUILT_IN_FETCH_AND_ADD_8:
6898 case BUILT_IN_FETCH_AND_ADD_16:
6899 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6900 target = expand_builtin_sync_operation (mode, exp, PLUS,
6901 false, target, ignore);
6902 if (target)
6903 return target;
6904 break;
6906 case BUILT_IN_FETCH_AND_SUB_1:
6907 case BUILT_IN_FETCH_AND_SUB_2:
6908 case BUILT_IN_FETCH_AND_SUB_4:
6909 case BUILT_IN_FETCH_AND_SUB_8:
6910 case BUILT_IN_FETCH_AND_SUB_16:
6911 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6912 target = expand_builtin_sync_operation (mode, exp, MINUS,
6913 false, target, ignore);
6914 if (target)
6915 return target;
6916 break;
6918 case BUILT_IN_FETCH_AND_OR_1:
6919 case BUILT_IN_FETCH_AND_OR_2:
6920 case BUILT_IN_FETCH_AND_OR_4:
6921 case BUILT_IN_FETCH_AND_OR_8:
6922 case BUILT_IN_FETCH_AND_OR_16:
6923 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6924 target = expand_builtin_sync_operation (mode, exp, IOR,
6925 false, target, ignore);
6926 if (target)
6927 return target;
6928 break;
6930 case BUILT_IN_FETCH_AND_AND_1:
6931 case BUILT_IN_FETCH_AND_AND_2:
6932 case BUILT_IN_FETCH_AND_AND_4:
6933 case BUILT_IN_FETCH_AND_AND_8:
6934 case BUILT_IN_FETCH_AND_AND_16:
6935 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6936 target = expand_builtin_sync_operation (mode, exp, AND,
6937 false, target, ignore);
6938 if (target)
6939 return target;
6940 break;
6942 case BUILT_IN_FETCH_AND_XOR_1:
6943 case BUILT_IN_FETCH_AND_XOR_2:
6944 case BUILT_IN_FETCH_AND_XOR_4:
6945 case BUILT_IN_FETCH_AND_XOR_8:
6946 case BUILT_IN_FETCH_AND_XOR_16:
6947 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6948 target = expand_builtin_sync_operation (mode, exp, XOR,
6949 false, target, ignore);
6950 if (target)
6951 return target;
6952 break;
6954 case BUILT_IN_FETCH_AND_NAND_1:
6955 case BUILT_IN_FETCH_AND_NAND_2:
6956 case BUILT_IN_FETCH_AND_NAND_4:
6957 case BUILT_IN_FETCH_AND_NAND_8:
6958 case BUILT_IN_FETCH_AND_NAND_16:
6959 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6960 target = expand_builtin_sync_operation (mode, exp, NOT,
6961 false, target, ignore);
6962 if (target)
6963 return target;
6964 break;
6966 case BUILT_IN_ADD_AND_FETCH_1:
6967 case BUILT_IN_ADD_AND_FETCH_2:
6968 case BUILT_IN_ADD_AND_FETCH_4:
6969 case BUILT_IN_ADD_AND_FETCH_8:
6970 case BUILT_IN_ADD_AND_FETCH_16:
6971 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6972 target = expand_builtin_sync_operation (mode, exp, PLUS,
6973 true, target, ignore);
6974 if (target)
6975 return target;
6976 break;
6978 case BUILT_IN_SUB_AND_FETCH_1:
6979 case BUILT_IN_SUB_AND_FETCH_2:
6980 case BUILT_IN_SUB_AND_FETCH_4:
6981 case BUILT_IN_SUB_AND_FETCH_8:
6982 case BUILT_IN_SUB_AND_FETCH_16:
6983 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6984 target = expand_builtin_sync_operation (mode, exp, MINUS,
6985 true, target, ignore);
6986 if (target)
6987 return target;
6988 break;
6990 case BUILT_IN_OR_AND_FETCH_1:
6991 case BUILT_IN_OR_AND_FETCH_2:
6992 case BUILT_IN_OR_AND_FETCH_4:
6993 case BUILT_IN_OR_AND_FETCH_8:
6994 case BUILT_IN_OR_AND_FETCH_16:
6995 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6996 target = expand_builtin_sync_operation (mode, exp, IOR,
6997 true, target, ignore);
6998 if (target)
6999 return target;
7000 break;
7002 case BUILT_IN_AND_AND_FETCH_1:
7003 case BUILT_IN_AND_AND_FETCH_2:
7004 case BUILT_IN_AND_AND_FETCH_4:
7005 case BUILT_IN_AND_AND_FETCH_8:
7006 case BUILT_IN_AND_AND_FETCH_16:
7007 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7008 target = expand_builtin_sync_operation (mode, exp, AND,
7009 true, target, ignore);
7010 if (target)
7011 return target;
7012 break;
7014 case BUILT_IN_XOR_AND_FETCH_1:
7015 case BUILT_IN_XOR_AND_FETCH_2:
7016 case BUILT_IN_XOR_AND_FETCH_4:
7017 case BUILT_IN_XOR_AND_FETCH_8:
7018 case BUILT_IN_XOR_AND_FETCH_16:
7019 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7020 target = expand_builtin_sync_operation (mode, exp, XOR,
7021 true, target, ignore);
7022 if (target)
7023 return target;
7024 break;
7026 case BUILT_IN_NAND_AND_FETCH_1:
7027 case BUILT_IN_NAND_AND_FETCH_2:
7028 case BUILT_IN_NAND_AND_FETCH_4:
7029 case BUILT_IN_NAND_AND_FETCH_8:
7030 case BUILT_IN_NAND_AND_FETCH_16:
7031 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7032 target = expand_builtin_sync_operation (mode, exp, NOT,
7033 true, target, ignore);
7034 if (target)
7035 return target;
7036 break;
7038 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7039 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7040 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7041 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7042 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7043 if (mode == VOIDmode)
7044 mode = TYPE_MODE (boolean_type_node);
7045 if (!target || !register_operand (target, mode))
7046 target = gen_reg_rtx (mode);
7048 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7049 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7050 if (target)
7051 return target;
7052 break;
7054 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7055 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7056 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7057 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7058 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7059 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7060 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7061 if (target)
7062 return target;
7063 break;
7065 case BUILT_IN_LOCK_TEST_AND_SET_1:
7066 case BUILT_IN_LOCK_TEST_AND_SET_2:
7067 case BUILT_IN_LOCK_TEST_AND_SET_4:
7068 case BUILT_IN_LOCK_TEST_AND_SET_8:
7069 case BUILT_IN_LOCK_TEST_AND_SET_16:
7070 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7071 target = expand_builtin_lock_test_and_set (mode, exp, target);
7072 if (target)
7073 return target;
7074 break;
7076 case BUILT_IN_LOCK_RELEASE_1:
7077 case BUILT_IN_LOCK_RELEASE_2:
7078 case BUILT_IN_LOCK_RELEASE_4:
7079 case BUILT_IN_LOCK_RELEASE_8:
7080 case BUILT_IN_LOCK_RELEASE_16:
7081 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7082 expand_builtin_lock_release (mode, exp);
7083 return const0_rtx;
7085 case BUILT_IN_SYNCHRONIZE:
7086 expand_builtin_synchronize ();
7087 return const0_rtx;
7089 case BUILT_IN_OBJECT_SIZE:
7090 return expand_builtin_object_size (exp);
7092 case BUILT_IN_MEMCPY_CHK:
7093 case BUILT_IN_MEMPCPY_CHK:
7094 case BUILT_IN_MEMMOVE_CHK:
7095 case BUILT_IN_MEMSET_CHK:
7096 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7097 if (target)
7098 return target;
7099 break;
7101 case BUILT_IN_STRCPY_CHK:
7102 case BUILT_IN_STPCPY_CHK:
7103 case BUILT_IN_STRNCPY_CHK:
7104 case BUILT_IN_STRCAT_CHK:
7105 case BUILT_IN_STRNCAT_CHK:
7106 case BUILT_IN_SNPRINTF_CHK:
7107 case BUILT_IN_VSNPRINTF_CHK:
7108 maybe_emit_chk_warning (exp, fcode);
7109 break;
7111 case BUILT_IN_SPRINTF_CHK:
7112 case BUILT_IN_VSPRINTF_CHK:
7113 maybe_emit_sprintf_chk_warning (exp, fcode);
7114 break;
7116 case BUILT_IN_FREE:
7117 maybe_emit_free_warning (exp);
7118 break;
7120 default: /* just do library call, if unknown builtin */
7121 break;
7124 /* The switch statement above can drop through to cause the function
7125 to be called normally. */
7126 return expand_call (exp, target, ignore);
7129 /* Determine whether a tree node represents a call to a built-in
7130 function. If the tree T is a call to a built-in function with
7131 the right number of arguments of the appropriate types, return
7132 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7133 Otherwise the return value is END_BUILTINS. */
7135 enum built_in_function
7136 builtin_mathfn_code (const_tree t)
7138 const_tree fndecl, arg, parmlist;
7139 const_tree argtype, parmtype;
7140 const_call_expr_arg_iterator iter;
7142 if (TREE_CODE (t) != CALL_EXPR
7143 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7144 return END_BUILTINS;
7146 fndecl = get_callee_fndecl (t);
7147 if (fndecl == NULL_TREE
7148 || TREE_CODE (fndecl) != FUNCTION_DECL
7149 || ! DECL_BUILT_IN (fndecl)
7150 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7151 return END_BUILTINS;
7153 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7154 init_const_call_expr_arg_iterator (t, &iter);
7155 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7157 /* If a function doesn't take a variable number of arguments,
7158 the last element in the list will have type `void'. */
7159 parmtype = TREE_VALUE (parmlist);
7160 if (VOID_TYPE_P (parmtype))
7162 if (more_const_call_expr_args_p (&iter))
7163 return END_BUILTINS;
7164 return DECL_FUNCTION_CODE (fndecl);
7167 if (! more_const_call_expr_args_p (&iter))
7168 return END_BUILTINS;
7170 arg = next_const_call_expr_arg (&iter);
7171 argtype = TREE_TYPE (arg);
7173 if (SCALAR_FLOAT_TYPE_P (parmtype))
7175 if (! SCALAR_FLOAT_TYPE_P (argtype))
7176 return END_BUILTINS;
7178 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7180 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7181 return END_BUILTINS;
7183 else if (POINTER_TYPE_P (parmtype))
7185 if (! POINTER_TYPE_P (argtype))
7186 return END_BUILTINS;
7188 else if (INTEGRAL_TYPE_P (parmtype))
7190 if (! INTEGRAL_TYPE_P (argtype))
7191 return END_BUILTINS;
7193 else
7194 return END_BUILTINS;
7197 /* Variable-length argument list. */
7198 return DECL_FUNCTION_CODE (fndecl);
7201 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7202 evaluate to a constant. */
7204 static tree
7205 fold_builtin_constant_p (tree arg)
7207 /* We return 1 for a numeric type that's known to be a constant
7208 value at compile-time or for an aggregate type that's a
7209 literal constant. */
7210 STRIP_NOPS (arg);
7212 /* If we know this is a constant, emit the constant of one. */
7213 if (CONSTANT_CLASS_P (arg)
7214 || (TREE_CODE (arg) == CONSTRUCTOR
7215 && TREE_CONSTANT (arg)))
7216 return integer_one_node;
7217 if (TREE_CODE (arg) == ADDR_EXPR)
7219 tree op = TREE_OPERAND (arg, 0);
7220 if (TREE_CODE (op) == STRING_CST
7221 || (TREE_CODE (op) == ARRAY_REF
7222 && integer_zerop (TREE_OPERAND (op, 1))
7223 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7224 return integer_one_node;
7227 /* If this expression has side effects, show we don't know it to be a
7228 constant. Likewise if it's a pointer or aggregate type since in
7229 those case we only want literals, since those are only optimized
7230 when generating RTL, not later.
7231 And finally, if we are compiling an initializer, not code, we
7232 need to return a definite result now; there's not going to be any
7233 more optimization done. */
7234 if (TREE_SIDE_EFFECTS (arg)
7235 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7236 || POINTER_TYPE_P (TREE_TYPE (arg))
7237 || cfun == 0
7238 || folding_initializer)
7239 return integer_zero_node;
7241 return NULL_TREE;
7244 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7245 return it as a truthvalue. */
7247 static tree
7248 build_builtin_expect_predicate (tree pred, tree expected)
7250 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7252 fn = built_in_decls[BUILT_IN_EXPECT];
7253 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7254 ret_type = TREE_TYPE (TREE_TYPE (fn));
7255 pred_type = TREE_VALUE (arg_types);
7256 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7258 pred = fold_convert (pred_type, pred);
7259 expected = fold_convert (expected_type, expected);
7260 call_expr = build_call_expr (fn, 2, pred, expected);
7262 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7263 build_int_cst (ret_type, 0));
7266 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7267 NULL_TREE if no simplification is possible. */
7269 static tree
7270 fold_builtin_expect (tree arg0, tree arg1)
7272 tree inner, fndecl;
7273 enum tree_code code;
7275 /* If this is a builtin_expect within a builtin_expect keep the
7276 inner one. See through a comparison against a constant. It
7277 might have been added to create a thruthvalue. */
7278 inner = arg0;
7279 if (COMPARISON_CLASS_P (inner)
7280 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7281 inner = TREE_OPERAND (inner, 0);
7283 if (TREE_CODE (inner) == CALL_EXPR
7284 && (fndecl = get_callee_fndecl (inner))
7285 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7286 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7287 return arg0;
7289 /* Distribute the expected value over short-circuiting operators.
7290 See through the cast from truthvalue_type_node to long. */
7291 inner = arg0;
7292 while (TREE_CODE (inner) == NOP_EXPR
7293 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7294 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7295 inner = TREE_OPERAND (inner, 0);
7297 code = TREE_CODE (inner);
7298 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7300 tree op0 = TREE_OPERAND (inner, 0);
7301 tree op1 = TREE_OPERAND (inner, 1);
7303 op0 = build_builtin_expect_predicate (op0, arg1);
7304 op1 = build_builtin_expect_predicate (op1, arg1);
7305 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7307 return fold_convert (TREE_TYPE (arg0), inner);
7310 /* If the argument isn't invariant then there's nothing else we can do. */
7311 if (!TREE_CONSTANT (arg0))
7312 return NULL_TREE;
7314 /* If we expect that a comparison against the argument will fold to
7315 a constant return the constant. In practice, this means a true
7316 constant or the address of a non-weak symbol. */
7317 inner = arg0;
7318 STRIP_NOPS (inner);
7319 if (TREE_CODE (inner) == ADDR_EXPR)
7323 inner = TREE_OPERAND (inner, 0);
7325 while (TREE_CODE (inner) == COMPONENT_REF
7326 || TREE_CODE (inner) == ARRAY_REF);
7327 if ((TREE_CODE (inner) == VAR_DECL
7328 || TREE_CODE (inner) == FUNCTION_DECL)
7329 && DECL_WEAK (inner))
7330 return NULL_TREE;
7333 /* Otherwise, ARG0 already has the proper type for the return value. */
7334 return arg0;
7337 /* Fold a call to __builtin_classify_type with argument ARG. */
7339 static tree
7340 fold_builtin_classify_type (tree arg)
7342 if (arg == 0)
7343 return build_int_cst (NULL_TREE, no_type_class);
7345 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7348 /* Fold a call to __builtin_strlen with argument ARG. */
7350 static tree
7351 fold_builtin_strlen (tree arg)
7353 if (!validate_arg (arg, POINTER_TYPE))
7354 return NULL_TREE;
7355 else
7357 tree len = c_strlen (arg, 0);
7359 if (len)
7361 /* Convert from the internal "sizetype" type to "size_t". */
7362 if (size_type_node)
7363 len = fold_convert (size_type_node, len);
7364 return len;
7367 return NULL_TREE;
7371 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7373 static tree
7374 fold_builtin_inf (tree type, int warn)
7376 REAL_VALUE_TYPE real;
7378 /* __builtin_inff is intended to be usable to define INFINITY on all
7379 targets. If an infinity is not available, INFINITY expands "to a
7380 positive constant of type float that overflows at translation
7381 time", footnote "In this case, using INFINITY will violate the
7382 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7383 Thus we pedwarn to ensure this constraint violation is
7384 diagnosed. */
7385 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7386 pedwarn (input_location, 0, "target format does not support infinity");
7388 real_inf (&real);
7389 return build_real (type, real);
7392 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7394 static tree
7395 fold_builtin_nan (tree arg, tree type, int quiet)
7397 REAL_VALUE_TYPE real;
7398 const char *str;
7400 if (!validate_arg (arg, POINTER_TYPE))
7401 return NULL_TREE;
7402 str = c_getstr (arg);
7403 if (!str)
7404 return NULL_TREE;
7406 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7407 return NULL_TREE;
7409 return build_real (type, real);
7412 /* Return true if the floating point expression T has an integer value.
7413 We also allow +Inf, -Inf and NaN to be considered integer values. */
7415 static bool
7416 integer_valued_real_p (tree t)
7418 switch (TREE_CODE (t))
7420 case FLOAT_EXPR:
7421 return true;
7423 case ABS_EXPR:
7424 case SAVE_EXPR:
7425 return integer_valued_real_p (TREE_OPERAND (t, 0));
7427 case COMPOUND_EXPR:
7428 case MODIFY_EXPR:
7429 case BIND_EXPR:
7430 return integer_valued_real_p (TREE_OPERAND (t, 1));
7432 case PLUS_EXPR:
7433 case MINUS_EXPR:
7434 case MULT_EXPR:
7435 case MIN_EXPR:
7436 case MAX_EXPR:
7437 return integer_valued_real_p (TREE_OPERAND (t, 0))
7438 && integer_valued_real_p (TREE_OPERAND (t, 1));
7440 case COND_EXPR:
7441 return integer_valued_real_p (TREE_OPERAND (t, 1))
7442 && integer_valued_real_p (TREE_OPERAND (t, 2));
7444 case REAL_CST:
7445 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7447 case NOP_EXPR:
7449 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7450 if (TREE_CODE (type) == INTEGER_TYPE)
7451 return true;
7452 if (TREE_CODE (type) == REAL_TYPE)
7453 return integer_valued_real_p (TREE_OPERAND (t, 0));
7454 break;
7457 case CALL_EXPR:
7458 switch (builtin_mathfn_code (t))
7460 CASE_FLT_FN (BUILT_IN_CEIL):
7461 CASE_FLT_FN (BUILT_IN_FLOOR):
7462 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7463 CASE_FLT_FN (BUILT_IN_RINT):
7464 CASE_FLT_FN (BUILT_IN_ROUND):
7465 CASE_FLT_FN (BUILT_IN_TRUNC):
7466 return true;
7468 CASE_FLT_FN (BUILT_IN_FMIN):
7469 CASE_FLT_FN (BUILT_IN_FMAX):
7470 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7471 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7473 default:
7474 break;
7476 break;
7478 default:
7479 break;
7481 return false;
7484 /* FNDECL is assumed to be a builtin where truncation can be propagated
7485 across (for instance floor((double)f) == (double)floorf (f).
7486 Do the transformation for a call with argument ARG. */
7488 static tree
7489 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7491 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7493 if (!validate_arg (arg, REAL_TYPE))
7494 return NULL_TREE;
7496 /* Integer rounding functions are idempotent. */
7497 if (fcode == builtin_mathfn_code (arg))
7498 return arg;
7500 /* If argument is already integer valued, and we don't need to worry
7501 about setting errno, there's no need to perform rounding. */
7502 if (! flag_errno_math && integer_valued_real_p (arg))
7503 return arg;
7505 if (optimize)
7507 tree arg0 = strip_float_extensions (arg);
7508 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7509 tree newtype = TREE_TYPE (arg0);
7510 tree decl;
7512 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7513 && (decl = mathfn_built_in (newtype, fcode)))
7514 return fold_convert (ftype,
7515 build_call_expr (decl, 1,
7516 fold_convert (newtype, arg0)));
7518 return NULL_TREE;
7521 /* FNDECL is assumed to be builtin which can narrow the FP type of
7522 the argument, for instance lround((double)f) -> lroundf (f).
7523 Do the transformation for a call with argument ARG. */
7525 static tree
7526 fold_fixed_mathfn (tree fndecl, tree arg)
7528 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7530 if (!validate_arg (arg, REAL_TYPE))
7531 return NULL_TREE;
7533 /* If argument is already integer valued, and we don't need to worry
7534 about setting errno, there's no need to perform rounding. */
7535 if (! flag_errno_math && integer_valued_real_p (arg))
7536 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7538 if (optimize)
7540 tree ftype = TREE_TYPE (arg);
7541 tree arg0 = strip_float_extensions (arg);
7542 tree newtype = TREE_TYPE (arg0);
7543 tree decl;
7545 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7546 && (decl = mathfn_built_in (newtype, fcode)))
7547 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7550 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7551 sizeof (long long) == sizeof (long). */
7552 if (TYPE_PRECISION (long_long_integer_type_node)
7553 == TYPE_PRECISION (long_integer_type_node))
7555 tree newfn = NULL_TREE;
7556 switch (fcode)
7558 CASE_FLT_FN (BUILT_IN_LLCEIL):
7559 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7560 break;
7562 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7563 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7564 break;
7566 CASE_FLT_FN (BUILT_IN_LLROUND):
7567 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7568 break;
7570 CASE_FLT_FN (BUILT_IN_LLRINT):
7571 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7572 break;
7574 default:
7575 break;
7578 if (newfn)
7580 tree newcall = build_call_expr(newfn, 1, arg);
7581 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7585 return NULL_TREE;
7588 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7589 return type. Return NULL_TREE if no simplification can be made. */
7591 static tree
7592 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7594 tree res;
7596 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7597 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7598 return NULL_TREE;
7600 /* Calculate the result when the argument is a constant. */
7601 if (TREE_CODE (arg) == COMPLEX_CST
7602 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7603 type, mpfr_hypot)))
7604 return res;
7606 if (TREE_CODE (arg) == COMPLEX_EXPR)
7608 tree real = TREE_OPERAND (arg, 0);
7609 tree imag = TREE_OPERAND (arg, 1);
7611 /* If either part is zero, cabs is fabs of the other. */
7612 if (real_zerop (real))
7613 return fold_build1 (ABS_EXPR, type, imag);
7614 if (real_zerop (imag))
7615 return fold_build1 (ABS_EXPR, type, real);
7617 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7618 if (flag_unsafe_math_optimizations
7619 && operand_equal_p (real, imag, OEP_PURE_SAME))
7621 const REAL_VALUE_TYPE sqrt2_trunc
7622 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7623 STRIP_NOPS (real);
7624 return fold_build2 (MULT_EXPR, type,
7625 fold_build1 (ABS_EXPR, type, real),
7626 build_real (type, sqrt2_trunc));
7630 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7631 if (TREE_CODE (arg) == NEGATE_EXPR
7632 || TREE_CODE (arg) == CONJ_EXPR)
7633 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7635 /* Don't do this when optimizing for size. */
7636 if (flag_unsafe_math_optimizations
7637 && optimize && optimize_function_for_speed_p (cfun))
7639 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7641 if (sqrtfn != NULL_TREE)
7643 tree rpart, ipart, result;
7645 arg = builtin_save_expr (arg);
7647 rpart = fold_build1 (REALPART_EXPR, type, arg);
7648 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7650 rpart = builtin_save_expr (rpart);
7651 ipart = builtin_save_expr (ipart);
7653 result = fold_build2 (PLUS_EXPR, type,
7654 fold_build2 (MULT_EXPR, type,
7655 rpart, rpart),
7656 fold_build2 (MULT_EXPR, type,
7657 ipart, ipart));
7659 return build_call_expr (sqrtfn, 1, result);
7663 return NULL_TREE;
7666 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7667 Return NULL_TREE if no simplification can be made. */
7669 static tree
7670 fold_builtin_sqrt (tree arg, tree type)
7673 enum built_in_function fcode;
7674 tree res;
7676 if (!validate_arg (arg, REAL_TYPE))
7677 return NULL_TREE;
7679 /* Calculate the result when the argument is a constant. */
7680 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7681 return res;
7683 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7684 fcode = builtin_mathfn_code (arg);
7685 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7687 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7688 arg = fold_build2 (MULT_EXPR, type,
7689 CALL_EXPR_ARG (arg, 0),
7690 build_real (type, dconsthalf));
7691 return build_call_expr (expfn, 1, arg);
7694 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7695 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7697 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7699 if (powfn)
7701 tree arg0 = CALL_EXPR_ARG (arg, 0);
7702 tree tree_root;
7703 /* The inner root was either sqrt or cbrt. */
7704 /* This was a conditional expression but it triggered a bug
7705 in Sun C 5.5. */
7706 REAL_VALUE_TYPE dconstroot;
7707 if (BUILTIN_SQRT_P (fcode))
7708 dconstroot = dconsthalf;
7709 else
7710 dconstroot = dconst_third ();
7712 /* Adjust for the outer root. */
7713 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7714 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7715 tree_root = build_real (type, dconstroot);
7716 return build_call_expr (powfn, 2, arg0, tree_root);
7720 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7721 if (flag_unsafe_math_optimizations
7722 && (fcode == BUILT_IN_POW
7723 || fcode == BUILT_IN_POWF
7724 || fcode == BUILT_IN_POWL))
7726 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7727 tree arg0 = CALL_EXPR_ARG (arg, 0);
7728 tree arg1 = CALL_EXPR_ARG (arg, 1);
7729 tree narg1;
7730 if (!tree_expr_nonnegative_p (arg0))
7731 arg0 = build1 (ABS_EXPR, type, arg0);
7732 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7733 build_real (type, dconsthalf));
7734 return build_call_expr (powfn, 2, arg0, narg1);
7737 return NULL_TREE;
7740 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7741 Return NULL_TREE if no simplification can be made. */
7743 static tree
7744 fold_builtin_cbrt (tree arg, tree type)
7746 const enum built_in_function fcode = builtin_mathfn_code (arg);
7747 tree res;
7749 if (!validate_arg (arg, REAL_TYPE))
7750 return NULL_TREE;
7752 /* Calculate the result when the argument is a constant. */
7753 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7754 return res;
7756 if (flag_unsafe_math_optimizations)
7758 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7759 if (BUILTIN_EXPONENT_P (fcode))
7761 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7762 const REAL_VALUE_TYPE third_trunc =
7763 real_value_truncate (TYPE_MODE (type), dconst_third ());
7764 arg = fold_build2 (MULT_EXPR, type,
7765 CALL_EXPR_ARG (arg, 0),
7766 build_real (type, third_trunc));
7767 return build_call_expr (expfn, 1, arg);
7770 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7771 if (BUILTIN_SQRT_P (fcode))
7773 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7775 if (powfn)
7777 tree arg0 = CALL_EXPR_ARG (arg, 0);
7778 tree tree_root;
7779 REAL_VALUE_TYPE dconstroot = dconst_third ();
7781 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7782 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7783 tree_root = build_real (type, dconstroot);
7784 return build_call_expr (powfn, 2, arg0, tree_root);
7788 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7789 if (BUILTIN_CBRT_P (fcode))
7791 tree arg0 = CALL_EXPR_ARG (arg, 0);
7792 if (tree_expr_nonnegative_p (arg0))
7794 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7796 if (powfn)
7798 tree tree_root;
7799 REAL_VALUE_TYPE dconstroot;
7801 real_arithmetic (&dconstroot, MULT_EXPR,
7802 dconst_third_ptr (), dconst_third_ptr ());
7803 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7804 tree_root = build_real (type, dconstroot);
7805 return build_call_expr (powfn, 2, arg0, tree_root);
7810 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7811 if (fcode == BUILT_IN_POW
7812 || fcode == BUILT_IN_POWF
7813 || fcode == BUILT_IN_POWL)
7815 tree arg00 = CALL_EXPR_ARG (arg, 0);
7816 tree arg01 = CALL_EXPR_ARG (arg, 1);
7817 if (tree_expr_nonnegative_p (arg00))
7819 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7820 const REAL_VALUE_TYPE dconstroot
7821 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7822 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7823 build_real (type, dconstroot));
7824 return build_call_expr (powfn, 2, arg00, narg01);
7828 return NULL_TREE;
7831 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7832 TYPE is the type of the return value. Return NULL_TREE if no
7833 simplification can be made. */
7835 static tree
7836 fold_builtin_cos (tree arg, tree type, tree fndecl)
7838 tree res, narg;
7840 if (!validate_arg (arg, REAL_TYPE))
7841 return NULL_TREE;
7843 /* Calculate the result when the argument is a constant. */
7844 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7845 return res;
7847 /* Optimize cos(-x) into cos (x). */
7848 if ((narg = fold_strip_sign_ops (arg)))
7849 return build_call_expr (fndecl, 1, narg);
7851 return NULL_TREE;
7854 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7855 Return NULL_TREE if no simplification can be made. */
7857 static tree
7858 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7860 if (validate_arg (arg, REAL_TYPE))
7862 tree res, narg;
7864 /* Calculate the result when the argument is a constant. */
7865 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7866 return res;
7868 /* Optimize cosh(-x) into cosh (x). */
7869 if ((narg = fold_strip_sign_ops (arg)))
7870 return build_call_expr (fndecl, 1, narg);
7873 return NULL_TREE;
7876 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7877 Return NULL_TREE if no simplification can be made. */
7879 static tree
7880 fold_builtin_tan (tree arg, tree type)
7882 enum built_in_function fcode;
7883 tree res;
7885 if (!validate_arg (arg, REAL_TYPE))
7886 return NULL_TREE;
7888 /* Calculate the result when the argument is a constant. */
7889 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7890 return res;
7892 /* Optimize tan(atan(x)) = x. */
7893 fcode = builtin_mathfn_code (arg);
7894 if (flag_unsafe_math_optimizations
7895 && (fcode == BUILT_IN_ATAN
7896 || fcode == BUILT_IN_ATANF
7897 || fcode == BUILT_IN_ATANL))
7898 return CALL_EXPR_ARG (arg, 0);
7900 return NULL_TREE;
7903 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7904 NULL_TREE if no simplification can be made. */
7906 static tree
7907 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7909 tree type;
7910 tree res, fn, call;
7912 if (!validate_arg (arg0, REAL_TYPE)
7913 || !validate_arg (arg1, POINTER_TYPE)
7914 || !validate_arg (arg2, POINTER_TYPE))
7915 return NULL_TREE;
7917 type = TREE_TYPE (arg0);
7919 /* Calculate the result when the argument is a constant. */
7920 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7921 return res;
7923 /* Canonicalize sincos to cexpi. */
7924 if (!TARGET_C99_FUNCTIONS)
7925 return NULL_TREE;
7926 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7927 if (!fn)
7928 return NULL_TREE;
7930 call = build_call_expr (fn, 1, arg0);
7931 call = builtin_save_expr (call);
7933 return build2 (COMPOUND_EXPR, type,
7934 build2 (MODIFY_EXPR, void_type_node,
7935 build_fold_indirect_ref (arg1),
7936 build1 (IMAGPART_EXPR, type, call)),
7937 build2 (MODIFY_EXPR, void_type_node,
7938 build_fold_indirect_ref (arg2),
7939 build1 (REALPART_EXPR, type, call)));
7942 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7943 NULL_TREE if no simplification can be made. */
7945 static tree
7946 fold_builtin_cexp (tree arg0, tree type)
7948 tree rtype;
7949 tree realp, imagp, ifn;
7951 if (!validate_arg (arg0, COMPLEX_TYPE))
7952 return NULL_TREE;
7954 rtype = TREE_TYPE (TREE_TYPE (arg0));
7956 /* In case we can figure out the real part of arg0 and it is constant zero
7957 fold to cexpi. */
7958 if (!TARGET_C99_FUNCTIONS)
7959 return NULL_TREE;
7960 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7961 if (!ifn)
7962 return NULL_TREE;
7964 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7965 && real_zerop (realp))
7967 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7968 return build_call_expr (ifn, 1, narg);
7971 /* In case we can easily decompose real and imaginary parts split cexp
7972 to exp (r) * cexpi (i). */
7973 if (flag_unsafe_math_optimizations
7974 && realp)
7976 tree rfn, rcall, icall;
7978 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7979 if (!rfn)
7980 return NULL_TREE;
7982 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7983 if (!imagp)
7984 return NULL_TREE;
7986 icall = build_call_expr (ifn, 1, imagp);
7987 icall = builtin_save_expr (icall);
7988 rcall = build_call_expr (rfn, 1, realp);
7989 rcall = builtin_save_expr (rcall);
7990 return fold_build2 (COMPLEX_EXPR, type,
7991 fold_build2 (MULT_EXPR, rtype,
7992 rcall,
7993 fold_build1 (REALPART_EXPR, rtype, icall)),
7994 fold_build2 (MULT_EXPR, rtype,
7995 rcall,
7996 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7999 return NULL_TREE;
8002 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8003 Return NULL_TREE if no simplification can be made. */
8005 static tree
8006 fold_builtin_trunc (tree fndecl, tree arg)
8008 if (!validate_arg (arg, REAL_TYPE))
8009 return NULL_TREE;
8011 /* Optimize trunc of constant value. */
8012 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8014 REAL_VALUE_TYPE r, x;
8015 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8017 x = TREE_REAL_CST (arg);
8018 real_trunc (&r, TYPE_MODE (type), &x);
8019 return build_real (type, r);
8022 return fold_trunc_transparent_mathfn (fndecl, arg);
8025 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8026 Return NULL_TREE if no simplification can be made. */
8028 static tree
8029 fold_builtin_floor (tree fndecl, tree arg)
8031 if (!validate_arg (arg, REAL_TYPE))
8032 return NULL_TREE;
8034 /* Optimize floor of constant value. */
8035 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8037 REAL_VALUE_TYPE x;
8039 x = TREE_REAL_CST (arg);
8040 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8042 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8043 REAL_VALUE_TYPE r;
8045 real_floor (&r, TYPE_MODE (type), &x);
8046 return build_real (type, r);
8050 /* Fold floor (x) where x is nonnegative to trunc (x). */
8051 if (tree_expr_nonnegative_p (arg))
8053 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8054 if (truncfn)
8055 return build_call_expr (truncfn, 1, arg);
8058 return fold_trunc_transparent_mathfn (fndecl, arg);
8061 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8062 Return NULL_TREE if no simplification can be made. */
8064 static tree
8065 fold_builtin_ceil (tree fndecl, tree arg)
8067 if (!validate_arg (arg, REAL_TYPE))
8068 return NULL_TREE;
8070 /* Optimize ceil of constant value. */
8071 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8073 REAL_VALUE_TYPE x;
8075 x = TREE_REAL_CST (arg);
8076 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8078 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8079 REAL_VALUE_TYPE r;
8081 real_ceil (&r, TYPE_MODE (type), &x);
8082 return build_real (type, r);
8086 return fold_trunc_transparent_mathfn (fndecl, arg);
8089 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8090 Return NULL_TREE if no simplification can be made. */
8092 static tree
8093 fold_builtin_round (tree fndecl, tree arg)
8095 if (!validate_arg (arg, REAL_TYPE))
8096 return NULL_TREE;
8098 /* Optimize round of constant value. */
8099 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8101 REAL_VALUE_TYPE x;
8103 x = TREE_REAL_CST (arg);
8104 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8106 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8107 REAL_VALUE_TYPE r;
8109 real_round (&r, TYPE_MODE (type), &x);
8110 return build_real (type, r);
8114 return fold_trunc_transparent_mathfn (fndecl, arg);
8117 /* Fold function call to builtin lround, lroundf or lroundl (or the
8118 corresponding long long versions) and other rounding functions. ARG
8119 is the argument to the call. Return NULL_TREE if no simplification
8120 can be made. */
8122 static tree
8123 fold_builtin_int_roundingfn (tree fndecl, tree arg)
8125 if (!validate_arg (arg, REAL_TYPE))
8126 return NULL_TREE;
8128 /* Optimize lround of constant value. */
8129 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8131 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8133 if (real_isfinite (&x))
8135 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8136 tree ftype = TREE_TYPE (arg);
8137 unsigned HOST_WIDE_INT lo2;
8138 HOST_WIDE_INT hi, lo;
8139 REAL_VALUE_TYPE r;
8141 switch (DECL_FUNCTION_CODE (fndecl))
8143 CASE_FLT_FN (BUILT_IN_LFLOOR):
8144 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8145 real_floor (&r, TYPE_MODE (ftype), &x);
8146 break;
8148 CASE_FLT_FN (BUILT_IN_LCEIL):
8149 CASE_FLT_FN (BUILT_IN_LLCEIL):
8150 real_ceil (&r, TYPE_MODE (ftype), &x);
8151 break;
8153 CASE_FLT_FN (BUILT_IN_LROUND):
8154 CASE_FLT_FN (BUILT_IN_LLROUND):
8155 real_round (&r, TYPE_MODE (ftype), &x);
8156 break;
8158 default:
8159 gcc_unreachable ();
8162 REAL_VALUE_TO_INT (&lo, &hi, r);
8163 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8164 return build_int_cst_wide (itype, lo2, hi);
8168 switch (DECL_FUNCTION_CODE (fndecl))
8170 CASE_FLT_FN (BUILT_IN_LFLOOR):
8171 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8172 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8173 if (tree_expr_nonnegative_p (arg))
8174 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8175 arg);
8176 break;
8177 default:;
8180 return fold_fixed_mathfn (fndecl, arg);
8183 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8184 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8185 the argument to the call. Return NULL_TREE if no simplification can
8186 be made. */
8188 static tree
8189 fold_builtin_bitop (tree fndecl, tree arg)
8191 if (!validate_arg (arg, INTEGER_TYPE))
8192 return NULL_TREE;
8194 /* Optimize for constant argument. */
8195 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8197 HOST_WIDE_INT hi, width, result;
8198 unsigned HOST_WIDE_INT lo;
8199 tree type;
8201 type = TREE_TYPE (arg);
8202 width = TYPE_PRECISION (type);
8203 lo = TREE_INT_CST_LOW (arg);
8205 /* Clear all the bits that are beyond the type's precision. */
8206 if (width > HOST_BITS_PER_WIDE_INT)
8208 hi = TREE_INT_CST_HIGH (arg);
8209 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8210 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8212 else
8214 hi = 0;
8215 if (width < HOST_BITS_PER_WIDE_INT)
8216 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8219 switch (DECL_FUNCTION_CODE (fndecl))
8221 CASE_INT_FN (BUILT_IN_FFS):
8222 if (lo != 0)
8223 result = exact_log2 (lo & -lo) + 1;
8224 else if (hi != 0)
8225 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8226 else
8227 result = 0;
8228 break;
8230 CASE_INT_FN (BUILT_IN_CLZ):
8231 if (hi != 0)
8232 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8233 else if (lo != 0)
8234 result = width - floor_log2 (lo) - 1;
8235 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8236 result = width;
8237 break;
8239 CASE_INT_FN (BUILT_IN_CTZ):
8240 if (lo != 0)
8241 result = exact_log2 (lo & -lo);
8242 else if (hi != 0)
8243 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8244 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8245 result = width;
8246 break;
8248 CASE_INT_FN (BUILT_IN_POPCOUNT):
8249 result = 0;
8250 while (lo)
8251 result++, lo &= lo - 1;
8252 while (hi)
8253 result++, hi &= hi - 1;
8254 break;
8256 CASE_INT_FN (BUILT_IN_PARITY):
8257 result = 0;
8258 while (lo)
8259 result++, lo &= lo - 1;
8260 while (hi)
8261 result++, hi &= hi - 1;
8262 result &= 1;
8263 break;
8265 default:
8266 gcc_unreachable ();
8269 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8272 return NULL_TREE;
8275 /* Fold function call to builtin_bswap and the long and long long
8276 variants. Return NULL_TREE if no simplification can be made. */
8277 static tree
8278 fold_builtin_bswap (tree fndecl, tree arg)
8280 if (! validate_arg (arg, INTEGER_TYPE))
8281 return NULL_TREE;
8283 /* Optimize constant value. */
8284 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8286 HOST_WIDE_INT hi, width, r_hi = 0;
8287 unsigned HOST_WIDE_INT lo, r_lo = 0;
8288 tree type;
8290 type = TREE_TYPE (arg);
8291 width = TYPE_PRECISION (type);
8292 lo = TREE_INT_CST_LOW (arg);
8293 hi = TREE_INT_CST_HIGH (arg);
8295 switch (DECL_FUNCTION_CODE (fndecl))
8297 case BUILT_IN_BSWAP32:
8298 case BUILT_IN_BSWAP64:
8300 int s;
8302 for (s = 0; s < width; s += 8)
8304 int d = width - s - 8;
8305 unsigned HOST_WIDE_INT byte;
8307 if (s < HOST_BITS_PER_WIDE_INT)
8308 byte = (lo >> s) & 0xff;
8309 else
8310 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8312 if (d < HOST_BITS_PER_WIDE_INT)
8313 r_lo |= byte << d;
8314 else
8315 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8319 break;
8321 default:
8322 gcc_unreachable ();
8325 if (width < HOST_BITS_PER_WIDE_INT)
8326 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8327 else
8328 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8331 return NULL_TREE;
8334 /* Return true if EXPR is the real constant contained in VALUE. */
8336 static bool
8337 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8339 STRIP_NOPS (expr);
8341 return ((TREE_CODE (expr) == REAL_CST
8342 && !TREE_OVERFLOW (expr)
8343 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8344 || (TREE_CODE (expr) == COMPLEX_CST
8345 && real_dconstp (TREE_REALPART (expr), value)
8346 && real_zerop (TREE_IMAGPART (expr))));
8349 /* A subroutine of fold_builtin to fold the various logarithmic
8350 functions. Return NULL_TREE if no simplification can me made.
8351 FUNC is the corresponding MPFR logarithm function. */
8353 static tree
8354 fold_builtin_logarithm (tree fndecl, tree arg,
8355 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8357 if (validate_arg (arg, REAL_TYPE))
8359 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8360 tree res;
8361 const enum built_in_function fcode = builtin_mathfn_code (arg);
8363 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8364 instead we'll look for 'e' truncated to MODE. So only do
8365 this if flag_unsafe_math_optimizations is set. */
8366 if (flag_unsafe_math_optimizations && func == mpfr_log)
8368 const REAL_VALUE_TYPE e_truncated =
8369 real_value_truncate (TYPE_MODE (type), dconst_e ());
8370 if (real_dconstp (arg, &e_truncated))
8371 return build_real (type, dconst1);
8374 /* Calculate the result when the argument is a constant. */
8375 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8376 return res;
8378 /* Special case, optimize logN(expN(x)) = x. */
8379 if (flag_unsafe_math_optimizations
8380 && ((func == mpfr_log
8381 && (fcode == BUILT_IN_EXP
8382 || fcode == BUILT_IN_EXPF
8383 || fcode == BUILT_IN_EXPL))
8384 || (func == mpfr_log2
8385 && (fcode == BUILT_IN_EXP2
8386 || fcode == BUILT_IN_EXP2F
8387 || fcode == BUILT_IN_EXP2L))
8388 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8389 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8391 /* Optimize logN(func()) for various exponential functions. We
8392 want to determine the value "x" and the power "exponent" in
8393 order to transform logN(x**exponent) into exponent*logN(x). */
8394 if (flag_unsafe_math_optimizations)
8396 tree exponent = 0, x = 0;
8398 switch (fcode)
8400 CASE_FLT_FN (BUILT_IN_EXP):
8401 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8402 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8403 dconst_e ()));
8404 exponent = CALL_EXPR_ARG (arg, 0);
8405 break;
8406 CASE_FLT_FN (BUILT_IN_EXP2):
8407 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8408 x = build_real (type, dconst2);
8409 exponent = CALL_EXPR_ARG (arg, 0);
8410 break;
8411 CASE_FLT_FN (BUILT_IN_EXP10):
8412 CASE_FLT_FN (BUILT_IN_POW10):
8413 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8415 REAL_VALUE_TYPE dconst10;
8416 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8417 x = build_real (type, dconst10);
8419 exponent = CALL_EXPR_ARG (arg, 0);
8420 break;
8421 CASE_FLT_FN (BUILT_IN_SQRT):
8422 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8423 x = CALL_EXPR_ARG (arg, 0);
8424 exponent = build_real (type, dconsthalf);
8425 break;
8426 CASE_FLT_FN (BUILT_IN_CBRT):
8427 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8428 x = CALL_EXPR_ARG (arg, 0);
8429 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8430 dconst_third ()));
8431 break;
8432 CASE_FLT_FN (BUILT_IN_POW):
8433 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8434 x = CALL_EXPR_ARG (arg, 0);
8435 exponent = CALL_EXPR_ARG (arg, 1);
8436 break;
8437 default:
8438 break;
8441 /* Now perform the optimization. */
8442 if (x && exponent)
8444 tree logfn = build_call_expr (fndecl, 1, x);
8445 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8450 return NULL_TREE;
8453 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8454 NULL_TREE if no simplification can be made. */
8456 static tree
8457 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8459 tree res, narg0, narg1;
8461 if (!validate_arg (arg0, REAL_TYPE)
8462 || !validate_arg (arg1, REAL_TYPE))
8463 return NULL_TREE;
8465 /* Calculate the result when the argument is a constant. */
8466 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8467 return res;
8469 /* If either argument to hypot has a negate or abs, strip that off.
8470 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8471 narg0 = fold_strip_sign_ops (arg0);
8472 narg1 = fold_strip_sign_ops (arg1);
8473 if (narg0 || narg1)
8475 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8476 narg1 ? narg1 : arg1);
8479 /* If either argument is zero, hypot is fabs of the other. */
8480 if (real_zerop (arg0))
8481 return fold_build1 (ABS_EXPR, type, arg1);
8482 else if (real_zerop (arg1))
8483 return fold_build1 (ABS_EXPR, type, arg0);
8485 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8486 if (flag_unsafe_math_optimizations
8487 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8489 const REAL_VALUE_TYPE sqrt2_trunc
8490 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8491 return fold_build2 (MULT_EXPR, type,
8492 fold_build1 (ABS_EXPR, type, arg0),
8493 build_real (type, sqrt2_trunc));
8496 return NULL_TREE;
8500 /* Fold a builtin function call to pow, powf, or powl. Return
8501 NULL_TREE if no simplification can be made. */
8502 static tree
8503 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8505 tree res;
8507 if (!validate_arg (arg0, REAL_TYPE)
8508 || !validate_arg (arg1, REAL_TYPE))
8509 return NULL_TREE;
8511 /* Calculate the result when the argument is a constant. */
8512 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8513 return res;
8515 /* Optimize pow(1.0,y) = 1.0. */
8516 if (real_onep (arg0))
8517 return omit_one_operand (type, build_real (type, dconst1), arg1);
8519 if (TREE_CODE (arg1) == REAL_CST
8520 && !TREE_OVERFLOW (arg1))
8522 REAL_VALUE_TYPE cint;
8523 REAL_VALUE_TYPE c;
8524 HOST_WIDE_INT n;
8526 c = TREE_REAL_CST (arg1);
8528 /* Optimize pow(x,0.0) = 1.0. */
8529 if (REAL_VALUES_EQUAL (c, dconst0))
8530 return omit_one_operand (type, build_real (type, dconst1),
8531 arg0);
8533 /* Optimize pow(x,1.0) = x. */
8534 if (REAL_VALUES_EQUAL (c, dconst1))
8535 return arg0;
8537 /* Optimize pow(x,-1.0) = 1.0/x. */
8538 if (REAL_VALUES_EQUAL (c, dconstm1))
8539 return fold_build2 (RDIV_EXPR, type,
8540 build_real (type, dconst1), arg0);
8542 /* Optimize pow(x,0.5) = sqrt(x). */
8543 if (flag_unsafe_math_optimizations
8544 && REAL_VALUES_EQUAL (c, dconsthalf))
8546 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8548 if (sqrtfn != NULL_TREE)
8549 return build_call_expr (sqrtfn, 1, arg0);
8552 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8553 if (flag_unsafe_math_optimizations)
8555 const REAL_VALUE_TYPE dconstroot
8556 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8558 if (REAL_VALUES_EQUAL (c, dconstroot))
8560 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8561 if (cbrtfn != NULL_TREE)
8562 return build_call_expr (cbrtfn, 1, arg0);
8566 /* Check for an integer exponent. */
8567 n = real_to_integer (&c);
8568 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8569 if (real_identical (&c, &cint))
8571 /* Attempt to evaluate pow at compile-time, unless this should
8572 raise an exception. */
8573 if (TREE_CODE (arg0) == REAL_CST
8574 && !TREE_OVERFLOW (arg0)
8575 && (n > 0
8576 || (!flag_trapping_math && !flag_errno_math)
8577 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8579 REAL_VALUE_TYPE x;
8580 bool inexact;
8582 x = TREE_REAL_CST (arg0);
8583 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8584 if (flag_unsafe_math_optimizations || !inexact)
8585 return build_real (type, x);
8588 /* Strip sign ops from even integer powers. */
8589 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8591 tree narg0 = fold_strip_sign_ops (arg0);
8592 if (narg0)
8593 return build_call_expr (fndecl, 2, narg0, arg1);
8598 if (flag_unsafe_math_optimizations)
8600 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8602 /* Optimize pow(expN(x),y) = expN(x*y). */
8603 if (BUILTIN_EXPONENT_P (fcode))
8605 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8606 tree arg = CALL_EXPR_ARG (arg0, 0);
8607 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8608 return build_call_expr (expfn, 1, arg);
8611 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8612 if (BUILTIN_SQRT_P (fcode))
8614 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8615 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8616 build_real (type, dconsthalf));
8617 return build_call_expr (fndecl, 2, narg0, narg1);
8620 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8621 if (BUILTIN_CBRT_P (fcode))
8623 tree arg = CALL_EXPR_ARG (arg0, 0);
8624 if (tree_expr_nonnegative_p (arg))
8626 const REAL_VALUE_TYPE dconstroot
8627 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8628 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8629 build_real (type, dconstroot));
8630 return build_call_expr (fndecl, 2, arg, narg1);
8634 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8635 if (fcode == BUILT_IN_POW
8636 || fcode == BUILT_IN_POWF
8637 || fcode == BUILT_IN_POWL)
8639 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8640 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8641 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8642 return build_call_expr (fndecl, 2, arg00, narg1);
8646 return NULL_TREE;
8649 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8650 Return NULL_TREE if no simplification can be made. */
8651 static tree
8652 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8653 tree arg0, tree arg1, tree type)
8655 if (!validate_arg (arg0, REAL_TYPE)
8656 || !validate_arg (arg1, INTEGER_TYPE))
8657 return NULL_TREE;
8659 /* Optimize pow(1.0,y) = 1.0. */
8660 if (real_onep (arg0))
8661 return omit_one_operand (type, build_real (type, dconst1), arg1);
8663 if (host_integerp (arg1, 0))
8665 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8667 /* Evaluate powi at compile-time. */
8668 if (TREE_CODE (arg0) == REAL_CST
8669 && !TREE_OVERFLOW (arg0))
8671 REAL_VALUE_TYPE x;
8672 x = TREE_REAL_CST (arg0);
8673 real_powi (&x, TYPE_MODE (type), &x, c);
8674 return build_real (type, x);
8677 /* Optimize pow(x,0) = 1.0. */
8678 if (c == 0)
8679 return omit_one_operand (type, build_real (type, dconst1),
8680 arg0);
8682 /* Optimize pow(x,1) = x. */
8683 if (c == 1)
8684 return arg0;
8686 /* Optimize pow(x,-1) = 1.0/x. */
8687 if (c == -1)
8688 return fold_build2 (RDIV_EXPR, type,
8689 build_real (type, dconst1), arg0);
8692 return NULL_TREE;
8695 /* A subroutine of fold_builtin to fold the various exponent
8696 functions. Return NULL_TREE if no simplification can be made.
8697 FUNC is the corresponding MPFR exponent function. */
8699 static tree
8700 fold_builtin_exponent (tree fndecl, tree arg,
8701 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8703 if (validate_arg (arg, REAL_TYPE))
8705 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8706 tree res;
8708 /* Calculate the result when the argument is a constant. */
8709 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8710 return res;
8712 /* Optimize expN(logN(x)) = x. */
8713 if (flag_unsafe_math_optimizations)
8715 const enum built_in_function fcode = builtin_mathfn_code (arg);
8717 if ((func == mpfr_exp
8718 && (fcode == BUILT_IN_LOG
8719 || fcode == BUILT_IN_LOGF
8720 || fcode == BUILT_IN_LOGL))
8721 || (func == mpfr_exp2
8722 && (fcode == BUILT_IN_LOG2
8723 || fcode == BUILT_IN_LOG2F
8724 || fcode == BUILT_IN_LOG2L))
8725 || (func == mpfr_exp10
8726 && (fcode == BUILT_IN_LOG10
8727 || fcode == BUILT_IN_LOG10F
8728 || fcode == BUILT_IN_LOG10L)))
8729 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8733 return NULL_TREE;
8736 /* Return true if VAR is a VAR_DECL or a component thereof. */
8738 static bool
8739 var_decl_component_p (tree var)
8741 tree inner = var;
8742 while (handled_component_p (inner))
8743 inner = TREE_OPERAND (inner, 0);
8744 return SSA_VAR_P (inner);
8747 /* Fold function call to builtin memset. Return
8748 NULL_TREE if no simplification can be made. */
8750 static tree
8751 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8753 tree var, ret;
8754 unsigned HOST_WIDE_INT length, cval;
8756 if (! validate_arg (dest, POINTER_TYPE)
8757 || ! validate_arg (c, INTEGER_TYPE)
8758 || ! validate_arg (len, INTEGER_TYPE))
8759 return NULL_TREE;
8761 if (! host_integerp (len, 1))
8762 return NULL_TREE;
8764 /* If the LEN parameter is zero, return DEST. */
8765 if (integer_zerop (len))
8766 return omit_one_operand (type, dest, c);
8768 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8769 return NULL_TREE;
8771 var = dest;
8772 STRIP_NOPS (var);
8773 if (TREE_CODE (var) != ADDR_EXPR)
8774 return NULL_TREE;
8776 var = TREE_OPERAND (var, 0);
8777 if (TREE_THIS_VOLATILE (var))
8778 return NULL_TREE;
8780 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8781 && !POINTER_TYPE_P (TREE_TYPE (var)))
8782 return NULL_TREE;
8784 if (! var_decl_component_p (var))
8785 return NULL_TREE;
8787 length = tree_low_cst (len, 1);
8788 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8789 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8790 < (int) length)
8791 return NULL_TREE;
8793 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8794 return NULL_TREE;
8796 if (integer_zerop (c))
8797 cval = 0;
8798 else
8800 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8801 return NULL_TREE;
8803 cval = tree_low_cst (c, 1);
8804 cval &= 0xff;
8805 cval |= cval << 8;
8806 cval |= cval << 16;
8807 cval |= (cval << 31) << 1;
8810 ret = build_int_cst_type (TREE_TYPE (var), cval);
8811 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8812 if (ignore)
8813 return ret;
8815 return omit_one_operand (type, dest, ret);
8818 /* Fold function call to builtin memset. Return
8819 NULL_TREE if no simplification can be made. */
8821 static tree
8822 fold_builtin_bzero (tree dest, tree size, bool ignore)
8824 if (! validate_arg (dest, POINTER_TYPE)
8825 || ! validate_arg (size, INTEGER_TYPE))
8826 return NULL_TREE;
8828 if (!ignore)
8829 return NULL_TREE;
8831 /* New argument list transforming bzero(ptr x, int y) to
8832 memset(ptr x, int 0, size_t y). This is done this way
8833 so that if it isn't expanded inline, we fallback to
8834 calling bzero instead of memset. */
8836 return fold_builtin_memset (dest, integer_zero_node,
8837 fold_convert (sizetype, size),
8838 void_type_node, ignore);
8841 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8842 NULL_TREE if no simplification can be made.
8843 If ENDP is 0, return DEST (like memcpy).
8844 If ENDP is 1, return DEST+LEN (like mempcpy).
8845 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8846 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8847 (memmove). */
8849 static tree
8850 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8852 tree destvar, srcvar, expr;
8854 if (! validate_arg (dest, POINTER_TYPE)
8855 || ! validate_arg (src, POINTER_TYPE)
8856 || ! validate_arg (len, INTEGER_TYPE))
8857 return NULL_TREE;
8859 /* If the LEN parameter is zero, return DEST. */
8860 if (integer_zerop (len))
8861 return omit_one_operand (type, dest, src);
8863 /* If SRC and DEST are the same (and not volatile), return
8864 DEST{,+LEN,+LEN-1}. */
8865 if (operand_equal_p (src, dest, 0))
8866 expr = len;
8867 else
8869 tree srctype, desttype;
8870 int src_align, dest_align;
8872 if (endp == 3)
8874 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8875 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8877 /* Both DEST and SRC must be pointer types.
8878 ??? This is what old code did. Is the testing for pointer types
8879 really mandatory?
8881 If either SRC is readonly or length is 1, we can use memcpy. */
8882 if (dest_align && src_align
8883 && (readonly_data_expr (src)
8884 || (host_integerp (len, 1)
8885 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8886 tree_low_cst (len, 1)))))
8888 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8889 if (!fn)
8890 return NULL_TREE;
8891 return build_call_expr (fn, 3, dest, src, len);
8893 return NULL_TREE;
8896 if (!host_integerp (len, 0))
8897 return NULL_TREE;
8898 /* FIXME:
8899 This logic lose for arguments like (type *)malloc (sizeof (type)),
8900 since we strip the casts of up to VOID return value from malloc.
8901 Perhaps we ought to inherit type from non-VOID argument here? */
8902 STRIP_NOPS (src);
8903 STRIP_NOPS (dest);
8904 srctype = TREE_TYPE (TREE_TYPE (src));
8905 desttype = TREE_TYPE (TREE_TYPE (dest));
8906 if (!srctype || !desttype
8907 || !TYPE_SIZE_UNIT (srctype)
8908 || !TYPE_SIZE_UNIT (desttype)
8909 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8910 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8911 || TYPE_VOLATILE (srctype)
8912 || TYPE_VOLATILE (desttype))
8913 return NULL_TREE;
8915 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8916 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8917 if (dest_align < (int) TYPE_ALIGN (desttype)
8918 || src_align < (int) TYPE_ALIGN (srctype))
8919 return NULL_TREE;
8921 if (!ignore)
8922 dest = builtin_save_expr (dest);
8924 srcvar = NULL_TREE;
8925 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8927 srcvar = build_fold_indirect_ref (src);
8928 if (TREE_THIS_VOLATILE (srcvar))
8929 return NULL_TREE;
8930 else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8931 srcvar = NULL_TREE;
8932 /* With memcpy, it is possible to bypass aliasing rules, so without
8933 this check i.e. execute/20060930-2.c would be misoptimized,
8934 because it use conflicting alias set to hold argument for the
8935 memcpy call. This check is probably unnecessary with
8936 -fno-strict-aliasing. Similarly for destvar. See also
8937 PR29286. */
8938 else if (!var_decl_component_p (srcvar))
8939 srcvar = NULL_TREE;
8942 destvar = NULL_TREE;
8943 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8945 destvar = build_fold_indirect_ref (dest);
8946 if (TREE_THIS_VOLATILE (destvar))
8947 return NULL_TREE;
8948 else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8949 destvar = NULL_TREE;
8950 else if (!var_decl_component_p (destvar))
8951 destvar = NULL_TREE;
8954 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8955 return NULL_TREE;
8957 if (srcvar == NULL_TREE)
8959 tree srcptype;
8960 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8961 return NULL_TREE;
8963 srctype = build_qualified_type (desttype, 0);
8964 if (src_align < (int) TYPE_ALIGN (srctype))
8966 if (AGGREGATE_TYPE_P (srctype)
8967 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8968 return NULL_TREE;
8970 srctype = build_variant_type_copy (srctype);
8971 TYPE_ALIGN (srctype) = src_align;
8972 TYPE_USER_ALIGN (srctype) = 1;
8973 TYPE_PACKED (srctype) = 1;
8975 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8976 src = fold_convert (srcptype, src);
8977 srcvar = build_fold_indirect_ref (src);
8979 else if (destvar == NULL_TREE)
8981 tree destptype;
8982 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8983 return NULL_TREE;
8985 desttype = build_qualified_type (srctype, 0);
8986 if (dest_align < (int) TYPE_ALIGN (desttype))
8988 if (AGGREGATE_TYPE_P (desttype)
8989 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8990 return NULL_TREE;
8992 desttype = build_variant_type_copy (desttype);
8993 TYPE_ALIGN (desttype) = dest_align;
8994 TYPE_USER_ALIGN (desttype) = 1;
8995 TYPE_PACKED (desttype) = 1;
8997 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8998 dest = fold_convert (destptype, dest);
8999 destvar = build_fold_indirect_ref (dest);
9002 if (srctype == desttype
9003 || (gimple_in_ssa_p (cfun)
9004 && useless_type_conversion_p (desttype, srctype)))
9005 expr = srcvar;
9006 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
9007 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
9008 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
9009 || POINTER_TYPE_P (TREE_TYPE (destvar))))
9010 expr = fold_convert (TREE_TYPE (destvar), srcvar);
9011 else
9012 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
9013 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
9016 if (ignore)
9017 return expr;
9019 if (endp == 0 || endp == 3)
9020 return omit_one_operand (type, dest, expr);
9022 if (expr == len)
9023 expr = NULL_TREE;
9025 if (endp == 2)
9026 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
9027 ssize_int (1));
9029 len = fold_convert (sizetype, len);
9030 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
9031 dest = fold_convert (type, dest);
9032 if (expr)
9033 dest = omit_one_operand (type, dest, expr);
9034 return dest;
9037 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9038 If LEN is not NULL, it represents the length of the string to be
9039 copied. Return NULL_TREE if no simplification can be made. */
9041 tree
9042 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
9044 tree fn;
9046 if (!validate_arg (dest, POINTER_TYPE)
9047 || !validate_arg (src, POINTER_TYPE))
9048 return NULL_TREE;
9050 /* If SRC and DEST are the same (and not volatile), return DEST. */
9051 if (operand_equal_p (src, dest, 0))
9052 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
9054 if (optimize_function_for_size_p (cfun))
9055 return NULL_TREE;
9057 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9058 if (!fn)
9059 return NULL_TREE;
9061 if (!len)
9063 len = c_strlen (src, 1);
9064 if (! len || TREE_SIDE_EFFECTS (len))
9065 return NULL_TREE;
9068 len = size_binop (PLUS_EXPR, len, ssize_int (1));
9069 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9070 build_call_expr (fn, 3, dest, src, len));
9073 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9074 If SLEN is not NULL, it represents the length of the source string.
9075 Return NULL_TREE if no simplification can be made. */
9077 tree
9078 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
9080 tree fn;
9082 if (!validate_arg (dest, POINTER_TYPE)
9083 || !validate_arg (src, POINTER_TYPE)
9084 || !validate_arg (len, INTEGER_TYPE))
9085 return NULL_TREE;
9087 /* If the LEN parameter is zero, return DEST. */
9088 if (integer_zerop (len))
9089 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9091 /* We can't compare slen with len as constants below if len is not a
9092 constant. */
9093 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9094 return NULL_TREE;
9096 if (!slen)
9097 slen = c_strlen (src, 1);
9099 /* Now, we must be passed a constant src ptr parameter. */
9100 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9101 return NULL_TREE;
9103 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
9105 /* We do not support simplification of this case, though we do
9106 support it when expanding trees into RTL. */
9107 /* FIXME: generate a call to __builtin_memset. */
9108 if (tree_int_cst_lt (slen, len))
9109 return NULL_TREE;
9111 /* OK transform into builtin memcpy. */
9112 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9113 if (!fn)
9114 return NULL_TREE;
9115 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9116 build_call_expr (fn, 3, dest, src, len));
9119 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9120 arguments to the call, and TYPE is its return type.
9121 Return NULL_TREE if no simplification can be made. */
9123 static tree
9124 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
9126 if (!validate_arg (arg1, POINTER_TYPE)
9127 || !validate_arg (arg2, INTEGER_TYPE)
9128 || !validate_arg (len, INTEGER_TYPE))
9129 return NULL_TREE;
9130 else
9132 const char *p1;
9134 if (TREE_CODE (arg2) != INTEGER_CST
9135 || !host_integerp (len, 1))
9136 return NULL_TREE;
9138 p1 = c_getstr (arg1);
9139 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9141 char c;
9142 const char *r;
9143 tree tem;
9145 if (target_char_cast (arg2, &c))
9146 return NULL_TREE;
9148 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9150 if (r == NULL)
9151 return build_int_cst (TREE_TYPE (arg1), 0);
9153 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9154 size_int (r - p1));
9155 return fold_convert (type, tem);
9157 return NULL_TREE;
9161 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9162 Return NULL_TREE if no simplification can be made. */
9164 static tree
9165 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
9167 const char *p1, *p2;
9169 if (!validate_arg (arg1, POINTER_TYPE)
9170 || !validate_arg (arg2, POINTER_TYPE)
9171 || !validate_arg (len, INTEGER_TYPE))
9172 return NULL_TREE;
9174 /* If the LEN parameter is zero, return zero. */
9175 if (integer_zerop (len))
9176 return omit_two_operands (integer_type_node, integer_zero_node,
9177 arg1, arg2);
9179 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9180 if (operand_equal_p (arg1, arg2, 0))
9181 return omit_one_operand (integer_type_node, integer_zero_node, len);
9183 p1 = c_getstr (arg1);
9184 p2 = c_getstr (arg2);
9186 /* If all arguments are constant, and the value of len is not greater
9187 than the lengths of arg1 and arg2, evaluate at compile-time. */
9188 if (host_integerp (len, 1) && p1 && p2
9189 && compare_tree_int (len, strlen (p1) + 1) <= 0
9190 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9192 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9194 if (r > 0)
9195 return integer_one_node;
9196 else if (r < 0)
9197 return integer_minus_one_node;
9198 else
9199 return integer_zero_node;
9202 /* If len parameter is one, return an expression corresponding to
9203 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9204 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9206 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9207 tree cst_uchar_ptr_node
9208 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9210 tree ind1 = fold_convert (integer_type_node,
9211 build1 (INDIRECT_REF, cst_uchar_node,
9212 fold_convert (cst_uchar_ptr_node,
9213 arg1)));
9214 tree ind2 = fold_convert (integer_type_node,
9215 build1 (INDIRECT_REF, cst_uchar_node,
9216 fold_convert (cst_uchar_ptr_node,
9217 arg2)));
9218 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9221 return NULL_TREE;
9224 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9225 Return NULL_TREE if no simplification can be made. */
9227 static tree
9228 fold_builtin_strcmp (tree arg1, tree arg2)
9230 const char *p1, *p2;
9232 if (!validate_arg (arg1, POINTER_TYPE)
9233 || !validate_arg (arg2, POINTER_TYPE))
9234 return NULL_TREE;
9236 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9237 if (operand_equal_p (arg1, arg2, 0))
9238 return integer_zero_node;
9240 p1 = c_getstr (arg1);
9241 p2 = c_getstr (arg2);
9243 if (p1 && p2)
9245 const int i = strcmp (p1, p2);
9246 if (i < 0)
9247 return integer_minus_one_node;
9248 else if (i > 0)
9249 return integer_one_node;
9250 else
9251 return integer_zero_node;
9254 /* If the second arg is "", return *(const unsigned char*)arg1. */
9255 if (p2 && *p2 == '\0')
9257 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9258 tree cst_uchar_ptr_node
9259 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9261 return fold_convert (integer_type_node,
9262 build1 (INDIRECT_REF, cst_uchar_node,
9263 fold_convert (cst_uchar_ptr_node,
9264 arg1)));
9267 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9268 if (p1 && *p1 == '\0')
9270 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9271 tree cst_uchar_ptr_node
9272 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9274 tree temp = fold_convert (integer_type_node,
9275 build1 (INDIRECT_REF, cst_uchar_node,
9276 fold_convert (cst_uchar_ptr_node,
9277 arg2)));
9278 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9281 return NULL_TREE;
9284 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9285 Return NULL_TREE if no simplification can be made. */
9287 static tree
9288 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9290 const char *p1, *p2;
9292 if (!validate_arg (arg1, POINTER_TYPE)
9293 || !validate_arg (arg2, POINTER_TYPE)
9294 || !validate_arg (len, INTEGER_TYPE))
9295 return NULL_TREE;
9297 /* If the LEN parameter is zero, return zero. */
9298 if (integer_zerop (len))
9299 return omit_two_operands (integer_type_node, integer_zero_node,
9300 arg1, arg2);
9302 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9303 if (operand_equal_p (arg1, arg2, 0))
9304 return omit_one_operand (integer_type_node, integer_zero_node, len);
9306 p1 = c_getstr (arg1);
9307 p2 = c_getstr (arg2);
9309 if (host_integerp (len, 1) && p1 && p2)
9311 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9312 if (i > 0)
9313 return integer_one_node;
9314 else if (i < 0)
9315 return integer_minus_one_node;
9316 else
9317 return integer_zero_node;
9320 /* If the second arg is "", and the length is greater than zero,
9321 return *(const unsigned char*)arg1. */
9322 if (p2 && *p2 == '\0'
9323 && TREE_CODE (len) == INTEGER_CST
9324 && tree_int_cst_sgn (len) == 1)
9326 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9327 tree cst_uchar_ptr_node
9328 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9330 return fold_convert (integer_type_node,
9331 build1 (INDIRECT_REF, cst_uchar_node,
9332 fold_convert (cst_uchar_ptr_node,
9333 arg1)));
9336 /* If the first arg is "", and the length is greater than zero,
9337 return -*(const unsigned char*)arg2. */
9338 if (p1 && *p1 == '\0'
9339 && TREE_CODE (len) == INTEGER_CST
9340 && tree_int_cst_sgn (len) == 1)
9342 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9343 tree cst_uchar_ptr_node
9344 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9346 tree temp = fold_convert (integer_type_node,
9347 build1 (INDIRECT_REF, cst_uchar_node,
9348 fold_convert (cst_uchar_ptr_node,
9349 arg2)));
9350 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9353 /* If len parameter is one, return an expression corresponding to
9354 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9355 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9357 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9358 tree cst_uchar_ptr_node
9359 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9361 tree ind1 = fold_convert (integer_type_node,
9362 build1 (INDIRECT_REF, cst_uchar_node,
9363 fold_convert (cst_uchar_ptr_node,
9364 arg1)));
9365 tree ind2 = fold_convert (integer_type_node,
9366 build1 (INDIRECT_REF, cst_uchar_node,
9367 fold_convert (cst_uchar_ptr_node,
9368 arg2)));
9369 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9372 return NULL_TREE;
9375 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9376 ARG. Return NULL_TREE if no simplification can be made. */
9378 static tree
9379 fold_builtin_signbit (tree arg, tree type)
9381 tree temp;
9383 if (!validate_arg (arg, REAL_TYPE))
9384 return NULL_TREE;
9386 /* If ARG is a compile-time constant, determine the result. */
9387 if (TREE_CODE (arg) == REAL_CST
9388 && !TREE_OVERFLOW (arg))
9390 REAL_VALUE_TYPE c;
9392 c = TREE_REAL_CST (arg);
9393 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9394 return fold_convert (type, temp);
9397 /* If ARG is non-negative, the result is always zero. */
9398 if (tree_expr_nonnegative_p (arg))
9399 return omit_one_operand (type, integer_zero_node, arg);
9401 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9402 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9403 return fold_build2 (LT_EXPR, type, arg,
9404 build_real (TREE_TYPE (arg), dconst0));
9406 return NULL_TREE;
9409 /* Fold function call to builtin copysign, copysignf or copysignl with
9410 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9411 be made. */
9413 static tree
9414 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9416 tree tem;
9418 if (!validate_arg (arg1, REAL_TYPE)
9419 || !validate_arg (arg2, REAL_TYPE))
9420 return NULL_TREE;
9422 /* copysign(X,X) is X. */
9423 if (operand_equal_p (arg1, arg2, 0))
9424 return fold_convert (type, arg1);
9426 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9427 if (TREE_CODE (arg1) == REAL_CST
9428 && TREE_CODE (arg2) == REAL_CST
9429 && !TREE_OVERFLOW (arg1)
9430 && !TREE_OVERFLOW (arg2))
9432 REAL_VALUE_TYPE c1, c2;
9434 c1 = TREE_REAL_CST (arg1);
9435 c2 = TREE_REAL_CST (arg2);
9436 /* c1.sign := c2.sign. */
9437 real_copysign (&c1, &c2);
9438 return build_real (type, c1);
9441 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9442 Remember to evaluate Y for side-effects. */
9443 if (tree_expr_nonnegative_p (arg2))
9444 return omit_one_operand (type,
9445 fold_build1 (ABS_EXPR, type, arg1),
9446 arg2);
9448 /* Strip sign changing operations for the first argument. */
9449 tem = fold_strip_sign_ops (arg1);
9450 if (tem)
9451 return build_call_expr (fndecl, 2, tem, arg2);
9453 return NULL_TREE;
9456 /* Fold a call to builtin isascii with argument ARG. */
9458 static tree
9459 fold_builtin_isascii (tree arg)
9461 if (!validate_arg (arg, INTEGER_TYPE))
9462 return NULL_TREE;
9463 else
9465 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9466 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9467 build_int_cst (NULL_TREE,
9468 ~ (unsigned HOST_WIDE_INT) 0x7f));
9469 return fold_build2 (EQ_EXPR, integer_type_node,
9470 arg, integer_zero_node);
9474 /* Fold a call to builtin toascii with argument ARG. */
9476 static tree
9477 fold_builtin_toascii (tree arg)
9479 if (!validate_arg (arg, INTEGER_TYPE))
9480 return NULL_TREE;
9482 /* Transform toascii(c) -> (c & 0x7f). */
9483 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9484 build_int_cst (NULL_TREE, 0x7f));
9487 /* Fold a call to builtin isdigit with argument ARG. */
9489 static tree
9490 fold_builtin_isdigit (tree arg)
9492 if (!validate_arg (arg, INTEGER_TYPE))
9493 return NULL_TREE;
9494 else
9496 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9497 /* According to the C standard, isdigit is unaffected by locale.
9498 However, it definitely is affected by the target character set. */
9499 unsigned HOST_WIDE_INT target_digit0
9500 = lang_hooks.to_target_charset ('0');
9502 if (target_digit0 == 0)
9503 return NULL_TREE;
9505 arg = fold_convert (unsigned_type_node, arg);
9506 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9507 build_int_cst (unsigned_type_node, target_digit0));
9508 return fold_build2 (LE_EXPR, integer_type_node, arg,
9509 build_int_cst (unsigned_type_node, 9));
9513 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9515 static tree
9516 fold_builtin_fabs (tree arg, tree type)
9518 if (!validate_arg (arg, REAL_TYPE))
9519 return NULL_TREE;
9521 arg = fold_convert (type, arg);
9522 if (TREE_CODE (arg) == REAL_CST)
9523 return fold_abs_const (arg, type);
9524 return fold_build1 (ABS_EXPR, type, arg);
9527 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9529 static tree
9530 fold_builtin_abs (tree arg, tree type)
9532 if (!validate_arg (arg, INTEGER_TYPE))
9533 return NULL_TREE;
9535 arg = fold_convert (type, arg);
9536 if (TREE_CODE (arg) == INTEGER_CST)
9537 return fold_abs_const (arg, type);
9538 return fold_build1 (ABS_EXPR, type, arg);
9541 /* Fold a call to builtin fmin or fmax. */
9543 static tree
9544 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9546 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9548 /* Calculate the result when the argument is a constant. */
9549 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9551 if (res)
9552 return res;
9554 /* If either argument is NaN, return the other one. Avoid the
9555 transformation if we get (and honor) a signalling NaN. Using
9556 omit_one_operand() ensures we create a non-lvalue. */
9557 if (TREE_CODE (arg0) == REAL_CST
9558 && real_isnan (&TREE_REAL_CST (arg0))
9559 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9560 || ! TREE_REAL_CST (arg0).signalling))
9561 return omit_one_operand (type, arg1, arg0);
9562 if (TREE_CODE (arg1) == REAL_CST
9563 && real_isnan (&TREE_REAL_CST (arg1))
9564 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9565 || ! TREE_REAL_CST (arg1).signalling))
9566 return omit_one_operand (type, arg0, arg1);
9568 /* Transform fmin/fmax(x,x) -> x. */
9569 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9570 return omit_one_operand (type, arg0, arg1);
9572 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9573 functions to return the numeric arg if the other one is NaN.
9574 These tree codes don't honor that, so only transform if
9575 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9576 handled, so we don't have to worry about it either. */
9577 if (flag_finite_math_only)
9578 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9579 fold_convert (type, arg0),
9580 fold_convert (type, arg1));
9582 return NULL_TREE;
9585 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9587 static tree
9588 fold_builtin_carg (tree arg, tree type)
9590 if (validate_arg (arg, COMPLEX_TYPE))
9592 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9594 if (atan2_fn)
9596 tree new_arg = builtin_save_expr (arg);
9597 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9598 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9599 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9603 return NULL_TREE;
9606 /* Fold a call to builtin logb/ilogb. */
9608 static tree
9609 fold_builtin_logb (tree arg, tree rettype)
9611 if (! validate_arg (arg, REAL_TYPE))
9612 return NULL_TREE;
9614 STRIP_NOPS (arg);
9616 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9618 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9620 switch (value->cl)
9622 case rvc_nan:
9623 case rvc_inf:
9624 /* If arg is Inf or NaN and we're logb, return it. */
9625 if (TREE_CODE (rettype) == REAL_TYPE)
9626 return fold_convert (rettype, arg);
9627 /* Fall through... */
9628 case rvc_zero:
9629 /* Zero may set errno and/or raise an exception for logb, also
9630 for ilogb we don't know FP_ILOGB0. */
9631 return NULL_TREE;
9632 case rvc_normal:
9633 /* For normal numbers, proceed iff radix == 2. In GCC,
9634 normalized significands are in the range [0.5, 1.0). We
9635 want the exponent as if they were [1.0, 2.0) so get the
9636 exponent and subtract 1. */
9637 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9638 return fold_convert (rettype, build_int_cst (NULL_TREE,
9639 REAL_EXP (value)-1));
9640 break;
9644 return NULL_TREE;
9647 /* Fold a call to builtin significand, if radix == 2. */
9649 static tree
9650 fold_builtin_significand (tree arg, tree rettype)
9652 if (! validate_arg (arg, REAL_TYPE))
9653 return NULL_TREE;
9655 STRIP_NOPS (arg);
9657 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9659 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9661 switch (value->cl)
9663 case rvc_zero:
9664 case rvc_nan:
9665 case rvc_inf:
9666 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9667 return fold_convert (rettype, arg);
9668 case rvc_normal:
9669 /* For normal numbers, proceed iff radix == 2. */
9670 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9672 REAL_VALUE_TYPE result = *value;
9673 /* In GCC, normalized significands are in the range [0.5,
9674 1.0). We want them to be [1.0, 2.0) so set the
9675 exponent to 1. */
9676 SET_REAL_EXP (&result, 1);
9677 return build_real (rettype, result);
9679 break;
9683 return NULL_TREE;
9686 /* Fold a call to builtin frexp, we can assume the base is 2. */
9688 static tree
9689 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9691 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9692 return NULL_TREE;
9694 STRIP_NOPS (arg0);
9696 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9697 return NULL_TREE;
9699 arg1 = build_fold_indirect_ref (arg1);
9701 /* Proceed if a valid pointer type was passed in. */
9702 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9704 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9705 tree frac, exp;
9707 switch (value->cl)
9709 case rvc_zero:
9710 /* For +-0, return (*exp = 0, +-0). */
9711 exp = integer_zero_node;
9712 frac = arg0;
9713 break;
9714 case rvc_nan:
9715 case rvc_inf:
9716 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9717 return omit_one_operand (rettype, arg0, arg1);
9718 case rvc_normal:
9720 /* Since the frexp function always expects base 2, and in
9721 GCC normalized significands are already in the range
9722 [0.5, 1.0), we have exactly what frexp wants. */
9723 REAL_VALUE_TYPE frac_rvt = *value;
9724 SET_REAL_EXP (&frac_rvt, 0);
9725 frac = build_real (rettype, frac_rvt);
9726 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9728 break;
9729 default:
9730 gcc_unreachable ();
9733 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9734 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9735 TREE_SIDE_EFFECTS (arg1) = 1;
9736 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9739 return NULL_TREE;
9742 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9743 then we can assume the base is two. If it's false, then we have to
9744 check the mode of the TYPE parameter in certain cases. */
9746 static tree
9747 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9749 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9751 STRIP_NOPS (arg0);
9752 STRIP_NOPS (arg1);
9754 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9755 if (real_zerop (arg0) || integer_zerop (arg1)
9756 || (TREE_CODE (arg0) == REAL_CST
9757 && !real_isfinite (&TREE_REAL_CST (arg0))))
9758 return omit_one_operand (type, arg0, arg1);
9760 /* If both arguments are constant, then try to evaluate it. */
9761 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9762 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9763 && host_integerp (arg1, 0))
9765 /* Bound the maximum adjustment to twice the range of the
9766 mode's valid exponents. Use abs to ensure the range is
9767 positive as a sanity check. */
9768 const long max_exp_adj = 2 *
9769 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9770 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9772 /* Get the user-requested adjustment. */
9773 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9775 /* The requested adjustment must be inside this range. This
9776 is a preliminary cap to avoid things like overflow, we
9777 may still fail to compute the result for other reasons. */
9778 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9780 REAL_VALUE_TYPE initial_result;
9782 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9784 /* Ensure we didn't overflow. */
9785 if (! real_isinf (&initial_result))
9787 const REAL_VALUE_TYPE trunc_result
9788 = real_value_truncate (TYPE_MODE (type), initial_result);
9790 /* Only proceed if the target mode can hold the
9791 resulting value. */
9792 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9793 return build_real (type, trunc_result);
9799 return NULL_TREE;
9802 /* Fold a call to builtin modf. */
9804 static tree
9805 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9807 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9808 return NULL_TREE;
9810 STRIP_NOPS (arg0);
9812 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9813 return NULL_TREE;
9815 arg1 = build_fold_indirect_ref (arg1);
9817 /* Proceed if a valid pointer type was passed in. */
9818 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9820 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9821 REAL_VALUE_TYPE trunc, frac;
9823 switch (value->cl)
9825 case rvc_nan:
9826 case rvc_zero:
9827 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9828 trunc = frac = *value;
9829 break;
9830 case rvc_inf:
9831 /* For +-Inf, return (*arg1 = arg0, +-0). */
9832 frac = dconst0;
9833 frac.sign = value->sign;
9834 trunc = *value;
9835 break;
9836 case rvc_normal:
9837 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9838 real_trunc (&trunc, VOIDmode, value);
9839 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9840 /* If the original number was negative and already
9841 integral, then the fractional part is -0.0. */
9842 if (value->sign && frac.cl == rvc_zero)
9843 frac.sign = value->sign;
9844 break;
9847 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9848 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9849 build_real (rettype, trunc));
9850 TREE_SIDE_EFFECTS (arg1) = 1;
9851 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9852 build_real (rettype, frac));
9855 return NULL_TREE;
9858 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9859 ARG is the argument for the call. */
9861 static tree
9862 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9864 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9865 REAL_VALUE_TYPE r;
9867 if (!validate_arg (arg, REAL_TYPE))
9868 return NULL_TREE;
9870 switch (builtin_index)
9872 case BUILT_IN_ISINF:
9873 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9874 return omit_one_operand (type, integer_zero_node, arg);
9876 if (TREE_CODE (arg) == REAL_CST)
9878 r = TREE_REAL_CST (arg);
9879 if (real_isinf (&r))
9880 return real_compare (GT_EXPR, &r, &dconst0)
9881 ? integer_one_node : integer_minus_one_node;
9882 else
9883 return integer_zero_node;
9886 return NULL_TREE;
9888 case BUILT_IN_ISINF_SIGN:
9890 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9891 /* In a boolean context, GCC will fold the inner COND_EXPR to
9892 1. So e.g. "if (isinf_sign(x))" would be folded to just
9893 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9894 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9895 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9896 tree tmp = NULL_TREE;
9898 arg = builtin_save_expr (arg);
9900 if (signbit_fn && isinf_fn)
9902 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9903 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9905 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9906 signbit_call, integer_zero_node);
9907 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9908 isinf_call, integer_zero_node);
9910 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9911 integer_minus_one_node, integer_one_node);
9912 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9913 integer_zero_node);
9916 return tmp;
9919 case BUILT_IN_ISFINITE:
9920 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9921 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9922 return omit_one_operand (type, integer_one_node, arg);
9924 if (TREE_CODE (arg) == REAL_CST)
9926 r = TREE_REAL_CST (arg);
9927 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9930 return NULL_TREE;
9932 case BUILT_IN_ISNAN:
9933 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9934 return omit_one_operand (type, integer_zero_node, arg);
9936 if (TREE_CODE (arg) == REAL_CST)
9938 r = TREE_REAL_CST (arg);
9939 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9942 arg = builtin_save_expr (arg);
9943 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9945 default:
9946 gcc_unreachable ();
9950 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9951 This builtin will generate code to return the appropriate floating
9952 point classification depending on the value of the floating point
9953 number passed in. The possible return values must be supplied as
9954 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9955 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9956 one floating point argument which is "type generic". */
9958 static tree
9959 fold_builtin_fpclassify (tree exp)
9961 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9962 arg, type, res, tmp;
9963 enum machine_mode mode;
9964 REAL_VALUE_TYPE r;
9965 char buf[128];
9967 /* Verify the required arguments in the original call. */
9968 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9969 INTEGER_TYPE, INTEGER_TYPE,
9970 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9971 return NULL_TREE;
9973 fp_nan = CALL_EXPR_ARG (exp, 0);
9974 fp_infinite = CALL_EXPR_ARG (exp, 1);
9975 fp_normal = CALL_EXPR_ARG (exp, 2);
9976 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9977 fp_zero = CALL_EXPR_ARG (exp, 4);
9978 arg = CALL_EXPR_ARG (exp, 5);
9979 type = TREE_TYPE (arg);
9980 mode = TYPE_MODE (type);
9981 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9983 /* fpclassify(x) ->
9984 isnan(x) ? FP_NAN :
9985 (fabs(x) == Inf ? FP_INFINITE :
9986 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9987 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9989 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9990 build_real (type, dconst0));
9991 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
9993 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9994 real_from_string (&r, buf);
9995 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9996 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
9998 if (HONOR_INFINITIES (mode))
10000 real_inf (&r);
10001 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10002 build_real (type, r));
10003 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
10006 if (HONOR_NANS (mode))
10008 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
10009 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
10012 return res;
10015 /* Fold a call to an unordered comparison function such as
10016 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10017 being called and ARG0 and ARG1 are the arguments for the call.
10018 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10019 the opposite of the desired result. UNORDERED_CODE is used
10020 for modes that can hold NaNs and ORDERED_CODE is used for
10021 the rest. */
10023 static tree
10024 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
10025 enum tree_code unordered_code,
10026 enum tree_code ordered_code)
10028 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10029 enum tree_code code;
10030 tree type0, type1;
10031 enum tree_code code0, code1;
10032 tree cmp_type = NULL_TREE;
10034 type0 = TREE_TYPE (arg0);
10035 type1 = TREE_TYPE (arg1);
10037 code0 = TREE_CODE (type0);
10038 code1 = TREE_CODE (type1);
10040 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10041 /* Choose the wider of two real types. */
10042 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10043 ? type0 : type1;
10044 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10045 cmp_type = type0;
10046 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10047 cmp_type = type1;
10049 arg0 = fold_convert (cmp_type, arg0);
10050 arg1 = fold_convert (cmp_type, arg1);
10052 if (unordered_code == UNORDERED_EXPR)
10054 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10055 return omit_two_operands (type, integer_zero_node, arg0, arg1);
10056 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
10059 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10060 : ordered_code;
10061 return fold_build1 (TRUTH_NOT_EXPR, type,
10062 fold_build2 (code, type, arg0, arg1));
10065 /* Fold a call to built-in function FNDECL with 0 arguments.
10066 IGNORE is true if the result of the function call is ignored. This
10067 function returns NULL_TREE if no simplification was possible. */
10069 static tree
10070 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10072 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10073 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10074 switch (fcode)
10076 CASE_FLT_FN (BUILT_IN_INF):
10077 case BUILT_IN_INFD32:
10078 case BUILT_IN_INFD64:
10079 case BUILT_IN_INFD128:
10080 return fold_builtin_inf (type, true);
10082 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10083 return fold_builtin_inf (type, false);
10085 case BUILT_IN_CLASSIFY_TYPE:
10086 return fold_builtin_classify_type (NULL_TREE);
10088 default:
10089 break;
10091 return NULL_TREE;
10094 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10095 IGNORE is true if the result of the function call is ignored. This
10096 function returns NULL_TREE if no simplification was possible. */
10098 static tree
10099 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
10101 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10102 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10103 switch (fcode)
10106 case BUILT_IN_CONSTANT_P:
10108 tree val = fold_builtin_constant_p (arg0);
10110 /* Gimplification will pull the CALL_EXPR for the builtin out of
10111 an if condition. When not optimizing, we'll not CSE it back.
10112 To avoid link error types of regressions, return false now. */
10113 if (!val && !optimize)
10114 val = integer_zero_node;
10116 return val;
10119 case BUILT_IN_CLASSIFY_TYPE:
10120 return fold_builtin_classify_type (arg0);
10122 case BUILT_IN_STRLEN:
10123 return fold_builtin_strlen (arg0);
10125 CASE_FLT_FN (BUILT_IN_FABS):
10126 return fold_builtin_fabs (arg0, type);
10128 case BUILT_IN_ABS:
10129 case BUILT_IN_LABS:
10130 case BUILT_IN_LLABS:
10131 case BUILT_IN_IMAXABS:
10132 return fold_builtin_abs (arg0, type);
10134 CASE_FLT_FN (BUILT_IN_CONJ):
10135 if (validate_arg (arg0, COMPLEX_TYPE))
10136 return fold_build1 (CONJ_EXPR, type, arg0);
10137 break;
10139 CASE_FLT_FN (BUILT_IN_CREAL):
10140 if (validate_arg (arg0, COMPLEX_TYPE))
10141 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
10142 break;
10144 CASE_FLT_FN (BUILT_IN_CIMAG):
10145 if (validate_arg (arg0, COMPLEX_TYPE))
10146 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
10147 break;
10149 CASE_FLT_FN (BUILT_IN_CCOS):
10150 CASE_FLT_FN (BUILT_IN_CCOSH):
10151 /* These functions are "even", i.e. f(x) == f(-x). */
10152 if (validate_arg (arg0, COMPLEX_TYPE))
10154 tree narg = fold_strip_sign_ops (arg0);
10155 if (narg)
10156 return build_call_expr (fndecl, 1, narg);
10158 break;
10160 CASE_FLT_FN (BUILT_IN_CABS):
10161 return fold_builtin_cabs (arg0, type, fndecl);
10163 CASE_FLT_FN (BUILT_IN_CARG):
10164 return fold_builtin_carg (arg0, type);
10166 CASE_FLT_FN (BUILT_IN_SQRT):
10167 return fold_builtin_sqrt (arg0, type);
10169 CASE_FLT_FN (BUILT_IN_CBRT):
10170 return fold_builtin_cbrt (arg0, type);
10172 CASE_FLT_FN (BUILT_IN_ASIN):
10173 if (validate_arg (arg0, REAL_TYPE))
10174 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10175 &dconstm1, &dconst1, true);
10176 break;
10178 CASE_FLT_FN (BUILT_IN_ACOS):
10179 if (validate_arg (arg0, REAL_TYPE))
10180 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10181 &dconstm1, &dconst1, true);
10182 break;
10184 CASE_FLT_FN (BUILT_IN_ATAN):
10185 if (validate_arg (arg0, REAL_TYPE))
10186 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10187 break;
10189 CASE_FLT_FN (BUILT_IN_ASINH):
10190 if (validate_arg (arg0, REAL_TYPE))
10191 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10192 break;
10194 CASE_FLT_FN (BUILT_IN_ACOSH):
10195 if (validate_arg (arg0, REAL_TYPE))
10196 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10197 &dconst1, NULL, true);
10198 break;
10200 CASE_FLT_FN (BUILT_IN_ATANH):
10201 if (validate_arg (arg0, REAL_TYPE))
10202 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10203 &dconstm1, &dconst1, false);
10204 break;
10206 CASE_FLT_FN (BUILT_IN_SIN):
10207 if (validate_arg (arg0, REAL_TYPE))
10208 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10209 break;
10211 CASE_FLT_FN (BUILT_IN_COS):
10212 return fold_builtin_cos (arg0, type, fndecl);
10213 break;
10215 CASE_FLT_FN (BUILT_IN_TAN):
10216 return fold_builtin_tan (arg0, type);
10218 CASE_FLT_FN (BUILT_IN_CEXP):
10219 return fold_builtin_cexp (arg0, type);
10221 CASE_FLT_FN (BUILT_IN_CEXPI):
10222 if (validate_arg (arg0, REAL_TYPE))
10223 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10224 break;
10226 CASE_FLT_FN (BUILT_IN_SINH):
10227 if (validate_arg (arg0, REAL_TYPE))
10228 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10229 break;
10231 CASE_FLT_FN (BUILT_IN_COSH):
10232 return fold_builtin_cosh (arg0, type, fndecl);
10234 CASE_FLT_FN (BUILT_IN_TANH):
10235 if (validate_arg (arg0, REAL_TYPE))
10236 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10237 break;
10239 CASE_FLT_FN (BUILT_IN_ERF):
10240 if (validate_arg (arg0, REAL_TYPE))
10241 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10242 break;
10244 CASE_FLT_FN (BUILT_IN_ERFC):
10245 if (validate_arg (arg0, REAL_TYPE))
10246 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10247 break;
10249 CASE_FLT_FN (BUILT_IN_TGAMMA):
10250 if (validate_arg (arg0, REAL_TYPE))
10251 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10252 break;
10254 CASE_FLT_FN (BUILT_IN_EXP):
10255 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10257 CASE_FLT_FN (BUILT_IN_EXP2):
10258 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10260 CASE_FLT_FN (BUILT_IN_EXP10):
10261 CASE_FLT_FN (BUILT_IN_POW10):
10262 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10264 CASE_FLT_FN (BUILT_IN_EXPM1):
10265 if (validate_arg (arg0, REAL_TYPE))
10266 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10267 break;
10269 CASE_FLT_FN (BUILT_IN_LOG):
10270 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10272 CASE_FLT_FN (BUILT_IN_LOG2):
10273 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10275 CASE_FLT_FN (BUILT_IN_LOG10):
10276 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10278 CASE_FLT_FN (BUILT_IN_LOG1P):
10279 if (validate_arg (arg0, REAL_TYPE))
10280 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10281 &dconstm1, NULL, false);
10282 break;
10284 CASE_FLT_FN (BUILT_IN_J0):
10285 if (validate_arg (arg0, REAL_TYPE))
10286 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10287 NULL, NULL, 0);
10288 break;
10290 CASE_FLT_FN (BUILT_IN_J1):
10291 if (validate_arg (arg0, REAL_TYPE))
10292 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10293 NULL, NULL, 0);
10294 break;
10296 CASE_FLT_FN (BUILT_IN_Y0):
10297 if (validate_arg (arg0, REAL_TYPE))
10298 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10299 &dconst0, NULL, false);
10300 break;
10302 CASE_FLT_FN (BUILT_IN_Y1):
10303 if (validate_arg (arg0, REAL_TYPE))
10304 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10305 &dconst0, NULL, false);
10306 break;
10308 CASE_FLT_FN (BUILT_IN_NAN):
10309 case BUILT_IN_NAND32:
10310 case BUILT_IN_NAND64:
10311 case BUILT_IN_NAND128:
10312 return fold_builtin_nan (arg0, type, true);
10314 CASE_FLT_FN (BUILT_IN_NANS):
10315 return fold_builtin_nan (arg0, type, false);
10317 CASE_FLT_FN (BUILT_IN_FLOOR):
10318 return fold_builtin_floor (fndecl, arg0);
10320 CASE_FLT_FN (BUILT_IN_CEIL):
10321 return fold_builtin_ceil (fndecl, arg0);
10323 CASE_FLT_FN (BUILT_IN_TRUNC):
10324 return fold_builtin_trunc (fndecl, arg0);
10326 CASE_FLT_FN (BUILT_IN_ROUND):
10327 return fold_builtin_round (fndecl, arg0);
10329 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10330 CASE_FLT_FN (BUILT_IN_RINT):
10331 return fold_trunc_transparent_mathfn (fndecl, arg0);
10333 CASE_FLT_FN (BUILT_IN_LCEIL):
10334 CASE_FLT_FN (BUILT_IN_LLCEIL):
10335 CASE_FLT_FN (BUILT_IN_LFLOOR):
10336 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10337 CASE_FLT_FN (BUILT_IN_LROUND):
10338 CASE_FLT_FN (BUILT_IN_LLROUND):
10339 return fold_builtin_int_roundingfn (fndecl, arg0);
10341 CASE_FLT_FN (BUILT_IN_LRINT):
10342 CASE_FLT_FN (BUILT_IN_LLRINT):
10343 return fold_fixed_mathfn (fndecl, arg0);
10345 case BUILT_IN_BSWAP32:
10346 case BUILT_IN_BSWAP64:
10347 return fold_builtin_bswap (fndecl, arg0);
10349 CASE_INT_FN (BUILT_IN_FFS):
10350 CASE_INT_FN (BUILT_IN_CLZ):
10351 CASE_INT_FN (BUILT_IN_CTZ):
10352 CASE_INT_FN (BUILT_IN_POPCOUNT):
10353 CASE_INT_FN (BUILT_IN_PARITY):
10354 return fold_builtin_bitop (fndecl, arg0);
10356 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10357 return fold_builtin_signbit (arg0, type);
10359 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10360 return fold_builtin_significand (arg0, type);
10362 CASE_FLT_FN (BUILT_IN_ILOGB):
10363 CASE_FLT_FN (BUILT_IN_LOGB):
10364 return fold_builtin_logb (arg0, type);
10366 case BUILT_IN_ISASCII:
10367 return fold_builtin_isascii (arg0);
10369 case BUILT_IN_TOASCII:
10370 return fold_builtin_toascii (arg0);
10372 case BUILT_IN_ISDIGIT:
10373 return fold_builtin_isdigit (arg0);
10375 CASE_FLT_FN (BUILT_IN_FINITE):
10376 case BUILT_IN_FINITED32:
10377 case BUILT_IN_FINITED64:
10378 case BUILT_IN_FINITED128:
10379 case BUILT_IN_ISFINITE:
10380 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10382 CASE_FLT_FN (BUILT_IN_ISINF):
10383 case BUILT_IN_ISINFD32:
10384 case BUILT_IN_ISINFD64:
10385 case BUILT_IN_ISINFD128:
10386 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10388 case BUILT_IN_ISINF_SIGN:
10389 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10391 CASE_FLT_FN (BUILT_IN_ISNAN):
10392 case BUILT_IN_ISNAND32:
10393 case BUILT_IN_ISNAND64:
10394 case BUILT_IN_ISNAND128:
10395 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10397 case BUILT_IN_PRINTF:
10398 case BUILT_IN_PRINTF_UNLOCKED:
10399 case BUILT_IN_VPRINTF:
10400 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10402 default:
10403 break;
10406 return NULL_TREE;
10410 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10411 IGNORE is true if the result of the function call is ignored. This
10412 function returns NULL_TREE if no simplification was possible. */
10414 static tree
10415 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10417 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10418 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10420 switch (fcode)
10422 CASE_FLT_FN (BUILT_IN_JN):
10423 if (validate_arg (arg0, INTEGER_TYPE)
10424 && validate_arg (arg1, REAL_TYPE))
10425 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10426 break;
10428 CASE_FLT_FN (BUILT_IN_YN):
10429 if (validate_arg (arg0, INTEGER_TYPE)
10430 && validate_arg (arg1, REAL_TYPE))
10431 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10432 &dconst0, false);
10433 break;
10435 CASE_FLT_FN (BUILT_IN_DREM):
10436 CASE_FLT_FN (BUILT_IN_REMAINDER):
10437 if (validate_arg (arg0, REAL_TYPE)
10438 && validate_arg(arg1, REAL_TYPE))
10439 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10440 break;
10442 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10443 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10444 if (validate_arg (arg0, REAL_TYPE)
10445 && validate_arg(arg1, POINTER_TYPE))
10446 return do_mpfr_lgamma_r (arg0, arg1, type);
10447 break;
10449 CASE_FLT_FN (BUILT_IN_ATAN2):
10450 if (validate_arg (arg0, REAL_TYPE)
10451 && validate_arg(arg1, REAL_TYPE))
10452 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10453 break;
10455 CASE_FLT_FN (BUILT_IN_FDIM):
10456 if (validate_arg (arg0, REAL_TYPE)
10457 && validate_arg(arg1, REAL_TYPE))
10458 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10459 break;
10461 CASE_FLT_FN (BUILT_IN_HYPOT):
10462 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10464 CASE_FLT_FN (BUILT_IN_LDEXP):
10465 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10466 CASE_FLT_FN (BUILT_IN_SCALBN):
10467 CASE_FLT_FN (BUILT_IN_SCALBLN):
10468 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10470 CASE_FLT_FN (BUILT_IN_FREXP):
10471 return fold_builtin_frexp (arg0, arg1, type);
10473 CASE_FLT_FN (BUILT_IN_MODF):
10474 return fold_builtin_modf (arg0, arg1, type);
10476 case BUILT_IN_BZERO:
10477 return fold_builtin_bzero (arg0, arg1, ignore);
10479 case BUILT_IN_FPUTS:
10480 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10482 case BUILT_IN_FPUTS_UNLOCKED:
10483 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10485 case BUILT_IN_STRSTR:
10486 return fold_builtin_strstr (arg0, arg1, type);
10488 case BUILT_IN_STRCAT:
10489 return fold_builtin_strcat (arg0, arg1);
10491 case BUILT_IN_STRSPN:
10492 return fold_builtin_strspn (arg0, arg1);
10494 case BUILT_IN_STRCSPN:
10495 return fold_builtin_strcspn (arg0, arg1);
10497 case BUILT_IN_STRCHR:
10498 case BUILT_IN_INDEX:
10499 return fold_builtin_strchr (arg0, arg1, type);
10501 case BUILT_IN_STRRCHR:
10502 case BUILT_IN_RINDEX:
10503 return fold_builtin_strrchr (arg0, arg1, type);
10505 case BUILT_IN_STRCPY:
10506 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10508 case BUILT_IN_STPCPY:
10509 if (ignore)
10511 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10512 if (!fn)
10513 break;
10515 return build_call_expr (fn, 2, arg0, arg1);
10517 break;
10519 case BUILT_IN_STRCMP:
10520 return fold_builtin_strcmp (arg0, arg1);
10522 case BUILT_IN_STRPBRK:
10523 return fold_builtin_strpbrk (arg0, arg1, type);
10525 case BUILT_IN_EXPECT:
10526 return fold_builtin_expect (arg0, arg1);
10528 CASE_FLT_FN (BUILT_IN_POW):
10529 return fold_builtin_pow (fndecl, arg0, arg1, type);
10531 CASE_FLT_FN (BUILT_IN_POWI):
10532 return fold_builtin_powi (fndecl, arg0, arg1, type);
10534 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10535 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10537 CASE_FLT_FN (BUILT_IN_FMIN):
10538 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10540 CASE_FLT_FN (BUILT_IN_FMAX):
10541 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10543 case BUILT_IN_ISGREATER:
10544 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10545 case BUILT_IN_ISGREATEREQUAL:
10546 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10547 case BUILT_IN_ISLESS:
10548 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10549 case BUILT_IN_ISLESSEQUAL:
10550 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10551 case BUILT_IN_ISLESSGREATER:
10552 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10553 case BUILT_IN_ISUNORDERED:
10554 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10555 NOP_EXPR);
10557 /* We do the folding for va_start in the expander. */
10558 case BUILT_IN_VA_START:
10559 break;
10561 case BUILT_IN_SPRINTF:
10562 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10564 case BUILT_IN_OBJECT_SIZE:
10565 return fold_builtin_object_size (arg0, arg1);
10567 case BUILT_IN_PRINTF:
10568 case BUILT_IN_PRINTF_UNLOCKED:
10569 case BUILT_IN_VPRINTF:
10570 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10572 case BUILT_IN_PRINTF_CHK:
10573 case BUILT_IN_VPRINTF_CHK:
10574 if (!validate_arg (arg0, INTEGER_TYPE)
10575 || TREE_SIDE_EFFECTS (arg0))
10576 return NULL_TREE;
10577 else
10578 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10579 break;
10581 case BUILT_IN_FPRINTF:
10582 case BUILT_IN_FPRINTF_UNLOCKED:
10583 case BUILT_IN_VFPRINTF:
10584 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10585 ignore, fcode);
10587 default:
10588 break;
10590 return NULL_TREE;
10593 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10594 and ARG2. IGNORE is true if the result of the function call is ignored.
10595 This function returns NULL_TREE if no simplification was possible. */
10597 static tree
10598 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10600 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10601 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10602 switch (fcode)
10605 CASE_FLT_FN (BUILT_IN_SINCOS):
10606 return fold_builtin_sincos (arg0, arg1, arg2);
10608 CASE_FLT_FN (BUILT_IN_FMA):
10609 if (validate_arg (arg0, REAL_TYPE)
10610 && validate_arg(arg1, REAL_TYPE)
10611 && validate_arg(arg2, REAL_TYPE))
10612 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10613 break;
10615 CASE_FLT_FN (BUILT_IN_REMQUO):
10616 if (validate_arg (arg0, REAL_TYPE)
10617 && validate_arg(arg1, REAL_TYPE)
10618 && validate_arg(arg2, POINTER_TYPE))
10619 return do_mpfr_remquo (arg0, arg1, arg2);
10620 break;
10622 case BUILT_IN_MEMSET:
10623 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10625 case BUILT_IN_BCOPY:
10626 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10628 case BUILT_IN_MEMCPY:
10629 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10631 case BUILT_IN_MEMPCPY:
10632 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10634 case BUILT_IN_MEMMOVE:
10635 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10637 case BUILT_IN_STRNCAT:
10638 return fold_builtin_strncat (arg0, arg1, arg2);
10640 case BUILT_IN_STRNCPY:
10641 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10643 case BUILT_IN_STRNCMP:
10644 return fold_builtin_strncmp (arg0, arg1, arg2);
10646 case BUILT_IN_MEMCHR:
10647 return fold_builtin_memchr (arg0, arg1, arg2, type);
10649 case BUILT_IN_BCMP:
10650 case BUILT_IN_MEMCMP:
10651 return fold_builtin_memcmp (arg0, arg1, arg2);;
10653 case BUILT_IN_SPRINTF:
10654 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10656 case BUILT_IN_STRCPY_CHK:
10657 case BUILT_IN_STPCPY_CHK:
10658 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10659 ignore, fcode);
10661 case BUILT_IN_STRCAT_CHK:
10662 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10664 case BUILT_IN_PRINTF_CHK:
10665 case BUILT_IN_VPRINTF_CHK:
10666 if (!validate_arg (arg0, INTEGER_TYPE)
10667 || TREE_SIDE_EFFECTS (arg0))
10668 return NULL_TREE;
10669 else
10670 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10671 break;
10673 case BUILT_IN_FPRINTF:
10674 case BUILT_IN_FPRINTF_UNLOCKED:
10675 case BUILT_IN_VFPRINTF:
10676 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10678 case BUILT_IN_FPRINTF_CHK:
10679 case BUILT_IN_VFPRINTF_CHK:
10680 if (!validate_arg (arg1, INTEGER_TYPE)
10681 || TREE_SIDE_EFFECTS (arg1))
10682 return NULL_TREE;
10683 else
10684 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10685 ignore, fcode);
10687 default:
10688 break;
10690 return NULL_TREE;
10693 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10694 ARG2, and ARG3. IGNORE is true if the result of the function call is
10695 ignored. This function returns NULL_TREE if no simplification was
10696 possible. */
10698 static tree
10699 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10700 bool ignore)
10702 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10704 switch (fcode)
10706 case BUILT_IN_MEMCPY_CHK:
10707 case BUILT_IN_MEMPCPY_CHK:
10708 case BUILT_IN_MEMMOVE_CHK:
10709 case BUILT_IN_MEMSET_CHK:
10710 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10711 NULL_TREE, ignore,
10712 DECL_FUNCTION_CODE (fndecl));
10714 case BUILT_IN_STRNCPY_CHK:
10715 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10717 case BUILT_IN_STRNCAT_CHK:
10718 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10720 case BUILT_IN_FPRINTF_CHK:
10721 case BUILT_IN_VFPRINTF_CHK:
10722 if (!validate_arg (arg1, INTEGER_TYPE)
10723 || TREE_SIDE_EFFECTS (arg1))
10724 return NULL_TREE;
10725 else
10726 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10727 ignore, fcode);
10728 break;
10730 default:
10731 break;
10733 return NULL_TREE;
10736 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10737 arguments, where NARGS <= 4. IGNORE is true if the result of the
10738 function call is ignored. This function returns NULL_TREE if no
10739 simplification was possible. Note that this only folds builtins with
10740 fixed argument patterns. Foldings that do varargs-to-varargs
10741 transformations, or that match calls with more than 4 arguments,
10742 need to be handled with fold_builtin_varargs instead. */
10744 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10746 static tree
10747 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10749 tree ret = NULL_TREE;
10751 switch (nargs)
10753 case 0:
10754 ret = fold_builtin_0 (fndecl, ignore);
10755 break;
10756 case 1:
10757 ret = fold_builtin_1 (fndecl, args[0], ignore);
10758 break;
10759 case 2:
10760 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10761 break;
10762 case 3:
10763 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10764 break;
10765 case 4:
10766 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10767 ignore);
10768 break;
10769 default:
10770 break;
10772 if (ret)
10774 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10775 TREE_NO_WARNING (ret) = 1;
10776 return ret;
10778 return NULL_TREE;
10781 /* Builtins with folding operations that operate on "..." arguments
10782 need special handling; we need to store the arguments in a convenient
10783 data structure before attempting any folding. Fortunately there are
10784 only a few builtins that fall into this category. FNDECL is the
10785 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10786 result of the function call is ignored. */
10788 static tree
10789 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10791 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10792 tree ret = NULL_TREE;
10794 switch (fcode)
10796 case BUILT_IN_SPRINTF_CHK:
10797 case BUILT_IN_VSPRINTF_CHK:
10798 ret = fold_builtin_sprintf_chk (exp, fcode);
10799 break;
10801 case BUILT_IN_SNPRINTF_CHK:
10802 case BUILT_IN_VSNPRINTF_CHK:
10803 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10804 break;
10806 case BUILT_IN_FPCLASSIFY:
10807 ret = fold_builtin_fpclassify (exp);
10808 break;
10810 default:
10811 break;
10813 if (ret)
10815 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10816 TREE_NO_WARNING (ret) = 1;
10817 return ret;
10819 return NULL_TREE;
10822 /* Return true if FNDECL shouldn't be folded right now.
10823 If a built-in function has an inline attribute always_inline
10824 wrapper, defer folding it after always_inline functions have
10825 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10826 might not be performed. */
10828 static bool
10829 avoid_folding_inline_builtin (tree fndecl)
10831 return (DECL_DECLARED_INLINE_P (fndecl)
10832 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10833 && cfun
10834 && !cfun->always_inline_functions_inlined
10835 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10838 /* A wrapper function for builtin folding that prevents warnings for
10839 "statement without effect" and the like, caused by removing the
10840 call node earlier than the warning is generated. */
10842 tree
10843 fold_call_expr (tree exp, bool ignore)
10845 tree ret = NULL_TREE;
10846 tree fndecl = get_callee_fndecl (exp);
10847 if (fndecl
10848 && TREE_CODE (fndecl) == FUNCTION_DECL
10849 && DECL_BUILT_IN (fndecl)
10850 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10851 yet. Defer folding until we see all the arguments
10852 (after inlining). */
10853 && !CALL_EXPR_VA_ARG_PACK (exp))
10855 int nargs = call_expr_nargs (exp);
10857 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10858 instead last argument is __builtin_va_arg_pack (). Defer folding
10859 even in that case, until arguments are finalized. */
10860 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10862 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10863 if (fndecl2
10864 && TREE_CODE (fndecl2) == FUNCTION_DECL
10865 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10866 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10867 return NULL_TREE;
10870 if (avoid_folding_inline_builtin (fndecl))
10871 return NULL_TREE;
10873 /* FIXME: Don't use a list in this interface. */
10874 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10875 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10876 else
10878 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10880 tree *args = CALL_EXPR_ARGP (exp);
10881 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10883 if (!ret)
10884 ret = fold_builtin_varargs (fndecl, exp, ignore);
10885 if (ret)
10887 /* Propagate location information from original call to
10888 expansion of builtin. Otherwise things like
10889 maybe_emit_chk_warning, that operate on the expansion
10890 of a builtin, will use the wrong location information. */
10891 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10893 tree realret = ret;
10894 if (TREE_CODE (ret) == NOP_EXPR)
10895 realret = TREE_OPERAND (ret, 0);
10896 if (CAN_HAVE_LOCATION_P (realret)
10897 && !EXPR_HAS_LOCATION (realret))
10898 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10899 return realret;
10901 return ret;
10905 return NULL_TREE;
10908 /* Conveniently construct a function call expression. FNDECL names the
10909 function to be called and ARGLIST is a TREE_LIST of arguments. */
10911 tree
10912 build_function_call_expr (tree fndecl, tree arglist)
10914 tree fntype = TREE_TYPE (fndecl);
10915 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10916 int n = list_length (arglist);
10917 tree *argarray = (tree *) alloca (n * sizeof (tree));
10918 int i;
10920 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10921 argarray[i] = TREE_VALUE (arglist);
10922 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10925 /* Conveniently construct a function call expression. FNDECL names the
10926 function to be called, N is the number of arguments, and the "..."
10927 parameters are the argument expressions. */
10929 tree
10930 build_call_expr (tree fndecl, int n, ...)
10932 va_list ap;
10933 tree fntype = TREE_TYPE (fndecl);
10934 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10935 tree *argarray = (tree *) alloca (n * sizeof (tree));
10936 int i;
10938 va_start (ap, n);
10939 for (i = 0; i < n; i++)
10940 argarray[i] = va_arg (ap, tree);
10941 va_end (ap);
10942 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10945 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10946 N arguments are passed in the array ARGARRAY. */
10948 tree
10949 fold_builtin_call_array (tree type,
10950 tree fn,
10951 int n,
10952 tree *argarray)
10954 tree ret = NULL_TREE;
10955 int i;
10956 tree exp;
10958 if (TREE_CODE (fn) == ADDR_EXPR)
10960 tree fndecl = TREE_OPERAND (fn, 0);
10961 if (TREE_CODE (fndecl) == FUNCTION_DECL
10962 && DECL_BUILT_IN (fndecl))
10964 /* If last argument is __builtin_va_arg_pack (), arguments to this
10965 function are not finalized yet. Defer folding until they are. */
10966 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10968 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10969 if (fndecl2
10970 && TREE_CODE (fndecl2) == FUNCTION_DECL
10971 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10972 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10973 return build_call_array (type, fn, n, argarray);
10975 if (avoid_folding_inline_builtin (fndecl))
10976 return build_call_array (type, fn, n, argarray);
10977 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10979 tree arglist = NULL_TREE;
10980 for (i = n - 1; i >= 0; i--)
10981 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10982 ret = targetm.fold_builtin (fndecl, arglist, false);
10983 if (ret)
10984 return ret;
10985 return build_call_array (type, fn, n, argarray);
10987 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10989 /* First try the transformations that don't require consing up
10990 an exp. */
10991 ret = fold_builtin_n (fndecl, argarray, n, false);
10992 if (ret)
10993 return ret;
10996 /* If we got this far, we need to build an exp. */
10997 exp = build_call_array (type, fn, n, argarray);
10998 ret = fold_builtin_varargs (fndecl, exp, false);
10999 return ret ? ret : exp;
11003 return build_call_array (type, fn, n, argarray);
11006 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11007 along with N new arguments specified as the "..." parameters. SKIP
11008 is the number of arguments in EXP to be omitted. This function is used
11009 to do varargs-to-varargs transformations. */
11011 static tree
11012 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
11014 int oldnargs = call_expr_nargs (exp);
11015 int nargs = oldnargs - skip + n;
11016 tree fntype = TREE_TYPE (fndecl);
11017 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11018 tree *buffer;
11020 if (n > 0)
11022 int i, j;
11023 va_list ap;
11025 buffer = XALLOCAVEC (tree, nargs);
11026 va_start (ap, n);
11027 for (i = 0; i < n; i++)
11028 buffer[i] = va_arg (ap, tree);
11029 va_end (ap);
11030 for (j = skip; j < oldnargs; j++, i++)
11031 buffer[i] = CALL_EXPR_ARG (exp, j);
11033 else
11034 buffer = CALL_EXPR_ARGP (exp) + skip;
11036 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
11039 /* Validate a single argument ARG against a tree code CODE representing
11040 a type. */
11042 static bool
11043 validate_arg (const_tree arg, enum tree_code code)
11045 if (!arg)
11046 return false;
11047 else if (code == POINTER_TYPE)
11048 return POINTER_TYPE_P (TREE_TYPE (arg));
11049 else if (code == INTEGER_TYPE)
11050 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11051 return code == TREE_CODE (TREE_TYPE (arg));
11054 /* This function validates the types of a function call argument list
11055 against a specified list of tree_codes. If the last specifier is a 0,
11056 that represents an ellipses, otherwise the last specifier must be a
11057 VOID_TYPE.
11059 This is the GIMPLE version of validate_arglist. Eventually we want to
11060 completely convert builtins.c to work from GIMPLEs and the tree based
11061 validate_arglist will then be removed. */
11063 bool
11064 validate_gimple_arglist (const_gimple call, ...)
11066 enum tree_code code;
11067 bool res = 0;
11068 va_list ap;
11069 const_tree arg;
11070 size_t i;
11072 va_start (ap, call);
11073 i = 0;
11077 code = va_arg (ap, enum tree_code);
11078 switch (code)
11080 case 0:
11081 /* This signifies an ellipses, any further arguments are all ok. */
11082 res = true;
11083 goto end;
11084 case VOID_TYPE:
11085 /* This signifies an endlink, if no arguments remain, return
11086 true, otherwise return false. */
11087 res = (i == gimple_call_num_args (call));
11088 goto end;
11089 default:
11090 /* If no parameters remain or the parameter's code does not
11091 match the specified code, return false. Otherwise continue
11092 checking any remaining arguments. */
11093 arg = gimple_call_arg (call, i++);
11094 if (!validate_arg (arg, code))
11095 goto end;
11096 break;
11099 while (1);
11101 /* We need gotos here since we can only have one VA_CLOSE in a
11102 function. */
11103 end: ;
11104 va_end (ap);
11106 return res;
11109 /* This function validates the types of a function call argument list
11110 against a specified list of tree_codes. If the last specifier is a 0,
11111 that represents an ellipses, otherwise the last specifier must be a
11112 VOID_TYPE. */
11114 bool
11115 validate_arglist (const_tree callexpr, ...)
11117 enum tree_code code;
11118 bool res = 0;
11119 va_list ap;
11120 const_call_expr_arg_iterator iter;
11121 const_tree arg;
11123 va_start (ap, callexpr);
11124 init_const_call_expr_arg_iterator (callexpr, &iter);
11128 code = va_arg (ap, enum tree_code);
11129 switch (code)
11131 case 0:
11132 /* This signifies an ellipses, any further arguments are all ok. */
11133 res = true;
11134 goto end;
11135 case VOID_TYPE:
11136 /* This signifies an endlink, if no arguments remain, return
11137 true, otherwise return false. */
11138 res = !more_const_call_expr_args_p (&iter);
11139 goto end;
11140 default:
11141 /* If no parameters remain or the parameter's code does not
11142 match the specified code, return false. Otherwise continue
11143 checking any remaining arguments. */
11144 arg = next_const_call_expr_arg (&iter);
11145 if (!validate_arg (arg, code))
11146 goto end;
11147 break;
11150 while (1);
11152 /* We need gotos here since we can only have one VA_CLOSE in a
11153 function. */
11154 end: ;
11155 va_end (ap);
11157 return res;
11160 /* Default target-specific builtin expander that does nothing. */
11163 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11164 rtx target ATTRIBUTE_UNUSED,
11165 rtx subtarget ATTRIBUTE_UNUSED,
11166 enum machine_mode mode ATTRIBUTE_UNUSED,
11167 int ignore ATTRIBUTE_UNUSED)
11169 return NULL_RTX;
11172 /* Returns true is EXP represents data that would potentially reside
11173 in a readonly section. */
11175 static bool
11176 readonly_data_expr (tree exp)
11178 STRIP_NOPS (exp);
11180 if (TREE_CODE (exp) != ADDR_EXPR)
11181 return false;
11183 exp = get_base_address (TREE_OPERAND (exp, 0));
11184 if (!exp)
11185 return false;
11187 /* Make sure we call decl_readonly_section only for trees it
11188 can handle (since it returns true for everything it doesn't
11189 understand). */
11190 if (TREE_CODE (exp) == STRING_CST
11191 || TREE_CODE (exp) == CONSTRUCTOR
11192 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11193 return decl_readonly_section (exp, 0);
11194 else
11195 return false;
11198 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11199 to the call, and TYPE is its return type.
11201 Return NULL_TREE if no simplification was possible, otherwise return the
11202 simplified form of the call as a tree.
11204 The simplified form may be a constant or other expression which
11205 computes the same value, but in a more efficient manner (including
11206 calls to other builtin functions).
11208 The call may contain arguments which need to be evaluated, but
11209 which are not useful to determine the result of the call. In
11210 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11211 COMPOUND_EXPR will be an argument which must be evaluated.
11212 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11213 COMPOUND_EXPR in the chain will contain the tree for the simplified
11214 form of the builtin function call. */
11216 static tree
11217 fold_builtin_strstr (tree s1, tree s2, tree type)
11219 if (!validate_arg (s1, POINTER_TYPE)
11220 || !validate_arg (s2, POINTER_TYPE))
11221 return NULL_TREE;
11222 else
11224 tree fn;
11225 const char *p1, *p2;
11227 p2 = c_getstr (s2);
11228 if (p2 == NULL)
11229 return NULL_TREE;
11231 p1 = c_getstr (s1);
11232 if (p1 != NULL)
11234 const char *r = strstr (p1, p2);
11235 tree tem;
11237 if (r == NULL)
11238 return build_int_cst (TREE_TYPE (s1), 0);
11240 /* Return an offset into the constant string argument. */
11241 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11242 s1, size_int (r - p1));
11243 return fold_convert (type, tem);
11246 /* The argument is const char *, and the result is char *, so we need
11247 a type conversion here to avoid a warning. */
11248 if (p2[0] == '\0')
11249 return fold_convert (type, s1);
11251 if (p2[1] != '\0')
11252 return NULL_TREE;
11254 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11255 if (!fn)
11256 return NULL_TREE;
11258 /* New argument list transforming strstr(s1, s2) to
11259 strchr(s1, s2[0]). */
11260 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11264 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11265 the call, and TYPE is its return type.
11267 Return NULL_TREE if no simplification was possible, otherwise return the
11268 simplified form of the call as a tree.
11270 The simplified form may be a constant or other expression which
11271 computes the same value, but in a more efficient manner (including
11272 calls to other builtin functions).
11274 The call may contain arguments which need to be evaluated, but
11275 which are not useful to determine the result of the call. In
11276 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11277 COMPOUND_EXPR will be an argument which must be evaluated.
11278 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11279 COMPOUND_EXPR in the chain will contain the tree for the simplified
11280 form of the builtin function call. */
11282 static tree
11283 fold_builtin_strchr (tree s1, tree s2, tree type)
11285 if (!validate_arg (s1, POINTER_TYPE)
11286 || !validate_arg (s2, INTEGER_TYPE))
11287 return NULL_TREE;
11288 else
11290 const char *p1;
11292 if (TREE_CODE (s2) != INTEGER_CST)
11293 return NULL_TREE;
11295 p1 = c_getstr (s1);
11296 if (p1 != NULL)
11298 char c;
11299 const char *r;
11300 tree tem;
11302 if (target_char_cast (s2, &c))
11303 return NULL_TREE;
11305 r = strchr (p1, c);
11307 if (r == NULL)
11308 return build_int_cst (TREE_TYPE (s1), 0);
11310 /* Return an offset into the constant string argument. */
11311 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11312 s1, size_int (r - p1));
11313 return fold_convert (type, tem);
11315 return NULL_TREE;
11319 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11320 the call, and TYPE is its return type.
11322 Return NULL_TREE if no simplification was possible, otherwise return the
11323 simplified form of the call as a tree.
11325 The simplified form may be a constant or other expression which
11326 computes the same value, but in a more efficient manner (including
11327 calls to other builtin functions).
11329 The call may contain arguments which need to be evaluated, but
11330 which are not useful to determine the result of the call. In
11331 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11332 COMPOUND_EXPR will be an argument which must be evaluated.
11333 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11334 COMPOUND_EXPR in the chain will contain the tree for the simplified
11335 form of the builtin function call. */
11337 static tree
11338 fold_builtin_strrchr (tree s1, tree s2, tree type)
11340 if (!validate_arg (s1, POINTER_TYPE)
11341 || !validate_arg (s2, INTEGER_TYPE))
11342 return NULL_TREE;
11343 else
11345 tree fn;
11346 const char *p1;
11348 if (TREE_CODE (s2) != INTEGER_CST)
11349 return NULL_TREE;
11351 p1 = c_getstr (s1);
11352 if (p1 != NULL)
11354 char c;
11355 const char *r;
11356 tree tem;
11358 if (target_char_cast (s2, &c))
11359 return NULL_TREE;
11361 r = strrchr (p1, c);
11363 if (r == NULL)
11364 return build_int_cst (TREE_TYPE (s1), 0);
11366 /* Return an offset into the constant string argument. */
11367 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11368 s1, size_int (r - p1));
11369 return fold_convert (type, tem);
11372 if (! integer_zerop (s2))
11373 return NULL_TREE;
11375 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11376 if (!fn)
11377 return NULL_TREE;
11379 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11380 return build_call_expr (fn, 2, s1, s2);
11384 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11385 to the call, and TYPE is its return type.
11387 Return NULL_TREE if no simplification was possible, otherwise return the
11388 simplified form of the call as a tree.
11390 The simplified form may be a constant or other expression which
11391 computes the same value, but in a more efficient manner (including
11392 calls to other builtin functions).
11394 The call may contain arguments which need to be evaluated, but
11395 which are not useful to determine the result of the call. In
11396 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11397 COMPOUND_EXPR will be an argument which must be evaluated.
11398 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11399 COMPOUND_EXPR in the chain will contain the tree for the simplified
11400 form of the builtin function call. */
11402 static tree
11403 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11405 if (!validate_arg (s1, POINTER_TYPE)
11406 || !validate_arg (s2, POINTER_TYPE))
11407 return NULL_TREE;
11408 else
11410 tree fn;
11411 const char *p1, *p2;
11413 p2 = c_getstr (s2);
11414 if (p2 == NULL)
11415 return NULL_TREE;
11417 p1 = c_getstr (s1);
11418 if (p1 != NULL)
11420 const char *r = strpbrk (p1, p2);
11421 tree tem;
11423 if (r == NULL)
11424 return build_int_cst (TREE_TYPE (s1), 0);
11426 /* Return an offset into the constant string argument. */
11427 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11428 s1, size_int (r - p1));
11429 return fold_convert (type, tem);
11432 if (p2[0] == '\0')
11433 /* strpbrk(x, "") == NULL.
11434 Evaluate and ignore s1 in case it had side-effects. */
11435 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11437 if (p2[1] != '\0')
11438 return NULL_TREE; /* Really call strpbrk. */
11440 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11441 if (!fn)
11442 return NULL_TREE;
11444 /* New argument list transforming strpbrk(s1, s2) to
11445 strchr(s1, s2[0]). */
11446 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11450 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11451 to the call.
11453 Return NULL_TREE if no simplification was possible, otherwise return the
11454 simplified form of the call as a tree.
11456 The simplified form may be a constant or other expression which
11457 computes the same value, but in a more efficient manner (including
11458 calls to other builtin functions).
11460 The call may contain arguments which need to be evaluated, but
11461 which are not useful to determine the result of the call. In
11462 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11463 COMPOUND_EXPR will be an argument which must be evaluated.
11464 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11465 COMPOUND_EXPR in the chain will contain the tree for the simplified
11466 form of the builtin function call. */
11468 static tree
11469 fold_builtin_strcat (tree dst, tree src)
11471 if (!validate_arg (dst, POINTER_TYPE)
11472 || !validate_arg (src, POINTER_TYPE))
11473 return NULL_TREE;
11474 else
11476 const char *p = c_getstr (src);
11478 /* If the string length is zero, return the dst parameter. */
11479 if (p && *p == '\0')
11480 return dst;
11482 return NULL_TREE;
11486 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11487 arguments to the call.
11489 Return NULL_TREE if no simplification was possible, otherwise return the
11490 simplified form of the call as a tree.
11492 The simplified form may be a constant or other expression which
11493 computes the same value, but in a more efficient manner (including
11494 calls to other builtin functions).
11496 The call may contain arguments which need to be evaluated, but
11497 which are not useful to determine the result of the call. In
11498 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11499 COMPOUND_EXPR will be an argument which must be evaluated.
11500 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11501 COMPOUND_EXPR in the chain will contain the tree for the simplified
11502 form of the builtin function call. */
11504 static tree
11505 fold_builtin_strncat (tree dst, tree src, tree len)
11507 if (!validate_arg (dst, POINTER_TYPE)
11508 || !validate_arg (src, POINTER_TYPE)
11509 || !validate_arg (len, INTEGER_TYPE))
11510 return NULL_TREE;
11511 else
11513 const char *p = c_getstr (src);
11515 /* If the requested length is zero, or the src parameter string
11516 length is zero, return the dst parameter. */
11517 if (integer_zerop (len) || (p && *p == '\0'))
11518 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11520 /* If the requested len is greater than or equal to the string
11521 length, call strcat. */
11522 if (TREE_CODE (len) == INTEGER_CST && p
11523 && compare_tree_int (len, strlen (p)) >= 0)
11525 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11527 /* If the replacement _DECL isn't initialized, don't do the
11528 transformation. */
11529 if (!fn)
11530 return NULL_TREE;
11532 return build_call_expr (fn, 2, dst, src);
11534 return NULL_TREE;
11538 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11539 to the call.
11541 Return NULL_TREE if no simplification was possible, otherwise return the
11542 simplified form of the call as a tree.
11544 The simplified form may be a constant or other expression which
11545 computes the same value, but in a more efficient manner (including
11546 calls to other builtin functions).
11548 The call may contain arguments which need to be evaluated, but
11549 which are not useful to determine the result of the call. In
11550 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11551 COMPOUND_EXPR will be an argument which must be evaluated.
11552 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11553 COMPOUND_EXPR in the chain will contain the tree for the simplified
11554 form of the builtin function call. */
11556 static tree
11557 fold_builtin_strspn (tree s1, tree s2)
11559 if (!validate_arg (s1, POINTER_TYPE)
11560 || !validate_arg (s2, POINTER_TYPE))
11561 return NULL_TREE;
11562 else
11564 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11566 /* If both arguments are constants, evaluate at compile-time. */
11567 if (p1 && p2)
11569 const size_t r = strspn (p1, p2);
11570 return size_int (r);
11573 /* If either argument is "", return NULL_TREE. */
11574 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11575 /* Evaluate and ignore both arguments in case either one has
11576 side-effects. */
11577 return omit_two_operands (size_type_node, size_zero_node,
11578 s1, s2);
11579 return NULL_TREE;
11583 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11584 to the call.
11586 Return NULL_TREE if no simplification was possible, otherwise return the
11587 simplified form of the call as a tree.
11589 The simplified form may be a constant or other expression which
11590 computes the same value, but in a more efficient manner (including
11591 calls to other builtin functions).
11593 The call may contain arguments which need to be evaluated, but
11594 which are not useful to determine the result of the call. In
11595 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11596 COMPOUND_EXPR will be an argument which must be evaluated.
11597 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11598 COMPOUND_EXPR in the chain will contain the tree for the simplified
11599 form of the builtin function call. */
11601 static tree
11602 fold_builtin_strcspn (tree s1, tree s2)
11604 if (!validate_arg (s1, POINTER_TYPE)
11605 || !validate_arg (s2, POINTER_TYPE))
11606 return NULL_TREE;
11607 else
11609 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11611 /* If both arguments are constants, evaluate at compile-time. */
11612 if (p1 && p2)
11614 const size_t r = strcspn (p1, p2);
11615 return size_int (r);
11618 /* If the first argument is "", return NULL_TREE. */
11619 if (p1 && *p1 == '\0')
11621 /* Evaluate and ignore argument s2 in case it has
11622 side-effects. */
11623 return omit_one_operand (size_type_node,
11624 size_zero_node, s2);
11627 /* If the second argument is "", return __builtin_strlen(s1). */
11628 if (p2 && *p2 == '\0')
11630 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11632 /* If the replacement _DECL isn't initialized, don't do the
11633 transformation. */
11634 if (!fn)
11635 return NULL_TREE;
11637 return build_call_expr (fn, 1, s1);
11639 return NULL_TREE;
11643 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11644 to the call. IGNORE is true if the value returned
11645 by the builtin will be ignored. UNLOCKED is true is true if this
11646 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11647 the known length of the string. Return NULL_TREE if no simplification
11648 was possible. */
11650 tree
11651 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11653 /* If we're using an unlocked function, assume the other unlocked
11654 functions exist explicitly. */
11655 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11656 : implicit_built_in_decls[BUILT_IN_FPUTC];
11657 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11658 : implicit_built_in_decls[BUILT_IN_FWRITE];
11660 /* If the return value is used, don't do the transformation. */
11661 if (!ignore)
11662 return NULL_TREE;
11664 /* Verify the arguments in the original call. */
11665 if (!validate_arg (arg0, POINTER_TYPE)
11666 || !validate_arg (arg1, POINTER_TYPE))
11667 return NULL_TREE;
11669 if (! len)
11670 len = c_strlen (arg0, 0);
11672 /* Get the length of the string passed to fputs. If the length
11673 can't be determined, punt. */
11674 if (!len
11675 || TREE_CODE (len) != INTEGER_CST)
11676 return NULL_TREE;
11678 switch (compare_tree_int (len, 1))
11680 case -1: /* length is 0, delete the call entirely . */
11681 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11683 case 0: /* length is 1, call fputc. */
11685 const char *p = c_getstr (arg0);
11687 if (p != NULL)
11689 if (fn_fputc)
11690 return build_call_expr (fn_fputc, 2,
11691 build_int_cst (NULL_TREE, p[0]), arg1);
11692 else
11693 return NULL_TREE;
11696 /* FALLTHROUGH */
11697 case 1: /* length is greater than 1, call fwrite. */
11699 /* If optimizing for size keep fputs. */
11700 if (optimize_function_for_size_p (cfun))
11701 return NULL_TREE;
11702 /* New argument list transforming fputs(string, stream) to
11703 fwrite(string, 1, len, stream). */
11704 if (fn_fwrite)
11705 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11706 else
11707 return NULL_TREE;
11709 default:
11710 gcc_unreachable ();
11712 return NULL_TREE;
11715 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11716 produced. False otherwise. This is done so that we don't output the error
11717 or warning twice or three times. */
11719 bool
11720 fold_builtin_next_arg (tree exp, bool va_start_p)
11722 tree fntype = TREE_TYPE (current_function_decl);
11723 int nargs = call_expr_nargs (exp);
11724 tree arg;
11726 if (TYPE_ARG_TYPES (fntype) == 0
11727 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11728 == void_type_node))
11730 error ("%<va_start%> used in function with fixed args");
11731 return true;
11734 if (va_start_p)
11736 if (va_start_p && (nargs != 2))
11738 error ("wrong number of arguments to function %<va_start%>");
11739 return true;
11741 arg = CALL_EXPR_ARG (exp, 1);
11743 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11744 when we checked the arguments and if needed issued a warning. */
11745 else
11747 if (nargs == 0)
11749 /* Evidently an out of date version of <stdarg.h>; can't validate
11750 va_start's second argument, but can still work as intended. */
11751 warning (0, "%<__builtin_next_arg%> called without an argument");
11752 return true;
11754 else if (nargs > 1)
11756 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11757 return true;
11759 arg = CALL_EXPR_ARG (exp, 0);
11762 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11763 or __builtin_next_arg (0) the first time we see it, after checking
11764 the arguments and if needed issuing a warning. */
11765 if (!integer_zerop (arg))
11767 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11769 /* Strip off all nops for the sake of the comparison. This
11770 is not quite the same as STRIP_NOPS. It does more.
11771 We must also strip off INDIRECT_EXPR for C++ reference
11772 parameters. */
11773 while (CONVERT_EXPR_P (arg)
11774 || TREE_CODE (arg) == INDIRECT_REF)
11775 arg = TREE_OPERAND (arg, 0);
11776 if (arg != last_parm)
11778 /* FIXME: Sometimes with the tree optimizers we can get the
11779 not the last argument even though the user used the last
11780 argument. We just warn and set the arg to be the last
11781 argument so that we will get wrong-code because of
11782 it. */
11783 warning (0, "second parameter of %<va_start%> not last named argument");
11786 /* Undefined by C99 7.15.1.4p4 (va_start):
11787 "If the parameter parmN is declared with the register storage
11788 class, with a function or array type, or with a type that is
11789 not compatible with the type that results after application of
11790 the default argument promotions, the behavior is undefined."
11792 else if (DECL_REGISTER (arg))
11793 warning (0, "undefined behaviour when second parameter of "
11794 "%<va_start%> is declared with %<register%> storage");
11796 /* We want to verify the second parameter just once before the tree
11797 optimizers are run and then avoid keeping it in the tree,
11798 as otherwise we could warn even for correct code like:
11799 void foo (int i, ...)
11800 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11801 if (va_start_p)
11802 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11803 else
11804 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11806 return false;
11810 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11811 ORIG may be null if this is a 2-argument call. We don't attempt to
11812 simplify calls with more than 3 arguments.
11814 Return NULL_TREE if no simplification was possible, otherwise return the
11815 simplified form of the call as a tree. If IGNORED is true, it means that
11816 the caller does not use the returned value of the function. */
11818 static tree
11819 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11821 tree call, retval;
11822 const char *fmt_str = NULL;
11824 /* Verify the required arguments in the original call. We deal with two
11825 types of sprintf() calls: 'sprintf (str, fmt)' and
11826 'sprintf (dest, "%s", orig)'. */
11827 if (!validate_arg (dest, POINTER_TYPE)
11828 || !validate_arg (fmt, POINTER_TYPE))
11829 return NULL_TREE;
11830 if (orig && !validate_arg (orig, POINTER_TYPE))
11831 return NULL_TREE;
11833 /* Check whether the format is a literal string constant. */
11834 fmt_str = c_getstr (fmt);
11835 if (fmt_str == NULL)
11836 return NULL_TREE;
11838 call = NULL_TREE;
11839 retval = NULL_TREE;
11841 if (!init_target_chars ())
11842 return NULL_TREE;
11844 /* If the format doesn't contain % args or %%, use strcpy. */
11845 if (strchr (fmt_str, target_percent) == NULL)
11847 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11849 if (!fn)
11850 return NULL_TREE;
11852 /* Don't optimize sprintf (buf, "abc", ptr++). */
11853 if (orig)
11854 return NULL_TREE;
11856 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11857 'format' is known to contain no % formats. */
11858 call = build_call_expr (fn, 2, dest, fmt);
11859 if (!ignored)
11860 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11863 /* If the format is "%s", use strcpy if the result isn't used. */
11864 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11866 tree fn;
11867 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11869 if (!fn)
11870 return NULL_TREE;
11872 /* Don't crash on sprintf (str1, "%s"). */
11873 if (!orig)
11874 return NULL_TREE;
11876 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11877 if (!ignored)
11879 retval = c_strlen (orig, 1);
11880 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11881 return NULL_TREE;
11883 call = build_call_expr (fn, 2, dest, orig);
11886 if (call && retval)
11888 retval = fold_convert
11889 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11890 retval);
11891 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11893 else
11894 return call;
11897 /* Expand a call EXP to __builtin_object_size. */
11900 expand_builtin_object_size (tree exp)
11902 tree ost;
11903 int object_size_type;
11904 tree fndecl = get_callee_fndecl (exp);
11906 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11908 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11909 exp, fndecl);
11910 expand_builtin_trap ();
11911 return const0_rtx;
11914 ost = CALL_EXPR_ARG (exp, 1);
11915 STRIP_NOPS (ost);
11917 if (TREE_CODE (ost) != INTEGER_CST
11918 || tree_int_cst_sgn (ost) < 0
11919 || compare_tree_int (ost, 3) > 0)
11921 error ("%Klast argument of %D is not integer constant between 0 and 3",
11922 exp, fndecl);
11923 expand_builtin_trap ();
11924 return const0_rtx;
11927 object_size_type = tree_low_cst (ost, 0);
11929 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11932 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11933 FCODE is the BUILT_IN_* to use.
11934 Return NULL_RTX if we failed; the caller should emit a normal call,
11935 otherwise try to get the result in TARGET, if convenient (and in
11936 mode MODE if that's convenient). */
11938 static rtx
11939 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11940 enum built_in_function fcode)
11942 tree dest, src, len, size;
11944 if (!validate_arglist (exp,
11945 POINTER_TYPE,
11946 fcode == BUILT_IN_MEMSET_CHK
11947 ? INTEGER_TYPE : POINTER_TYPE,
11948 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11949 return NULL_RTX;
11951 dest = CALL_EXPR_ARG (exp, 0);
11952 src = CALL_EXPR_ARG (exp, 1);
11953 len = CALL_EXPR_ARG (exp, 2);
11954 size = CALL_EXPR_ARG (exp, 3);
11956 if (! host_integerp (size, 1))
11957 return NULL_RTX;
11959 if (host_integerp (len, 1) || integer_all_onesp (size))
11961 tree fn;
11963 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11965 warning (0, "%Kcall to %D will always overflow destination buffer",
11966 exp, get_callee_fndecl (exp));
11967 return NULL_RTX;
11970 fn = NULL_TREE;
11971 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11972 mem{cpy,pcpy,move,set} is available. */
11973 switch (fcode)
11975 case BUILT_IN_MEMCPY_CHK:
11976 fn = built_in_decls[BUILT_IN_MEMCPY];
11977 break;
11978 case BUILT_IN_MEMPCPY_CHK:
11979 fn = built_in_decls[BUILT_IN_MEMPCPY];
11980 break;
11981 case BUILT_IN_MEMMOVE_CHK:
11982 fn = built_in_decls[BUILT_IN_MEMMOVE];
11983 break;
11984 case BUILT_IN_MEMSET_CHK:
11985 fn = built_in_decls[BUILT_IN_MEMSET];
11986 break;
11987 default:
11988 break;
11991 if (! fn)
11992 return NULL_RTX;
11994 fn = build_call_expr (fn, 3, dest, src, len);
11995 STRIP_TYPE_NOPS (fn);
11996 while (TREE_CODE (fn) == COMPOUND_EXPR)
11998 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11999 EXPAND_NORMAL);
12000 fn = TREE_OPERAND (fn, 1);
12002 if (TREE_CODE (fn) == CALL_EXPR)
12003 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12004 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12006 else if (fcode == BUILT_IN_MEMSET_CHK)
12007 return NULL_RTX;
12008 else
12010 unsigned int dest_align
12011 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12013 /* If DEST is not a pointer type, call the normal function. */
12014 if (dest_align == 0)
12015 return NULL_RTX;
12017 /* If SRC and DEST are the same (and not volatile), do nothing. */
12018 if (operand_equal_p (src, dest, 0))
12020 tree expr;
12022 if (fcode != BUILT_IN_MEMPCPY_CHK)
12024 /* Evaluate and ignore LEN in case it has side-effects. */
12025 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12026 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12029 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12030 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12033 /* __memmove_chk special case. */
12034 if (fcode == BUILT_IN_MEMMOVE_CHK)
12036 unsigned int src_align
12037 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12039 if (src_align == 0)
12040 return NULL_RTX;
12042 /* If src is categorized for a readonly section we can use
12043 normal __memcpy_chk. */
12044 if (readonly_data_expr (src))
12046 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12047 if (!fn)
12048 return NULL_RTX;
12049 fn = build_call_expr (fn, 4, dest, src, len, size);
12050 STRIP_TYPE_NOPS (fn);
12051 while (TREE_CODE (fn) == COMPOUND_EXPR)
12053 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12054 EXPAND_NORMAL);
12055 fn = TREE_OPERAND (fn, 1);
12057 if (TREE_CODE (fn) == CALL_EXPR)
12058 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12059 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12062 return NULL_RTX;
12066 /* Emit warning if a buffer overflow is detected at compile time. */
12068 static void
12069 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12071 int is_strlen = 0;
12072 tree len, size;
12074 switch (fcode)
12076 case BUILT_IN_STRCPY_CHK:
12077 case BUILT_IN_STPCPY_CHK:
12078 /* For __strcat_chk the warning will be emitted only if overflowing
12079 by at least strlen (dest) + 1 bytes. */
12080 case BUILT_IN_STRCAT_CHK:
12081 len = CALL_EXPR_ARG (exp, 1);
12082 size = CALL_EXPR_ARG (exp, 2);
12083 is_strlen = 1;
12084 break;
12085 case BUILT_IN_STRNCAT_CHK:
12086 case BUILT_IN_STRNCPY_CHK:
12087 len = CALL_EXPR_ARG (exp, 2);
12088 size = CALL_EXPR_ARG (exp, 3);
12089 break;
12090 case BUILT_IN_SNPRINTF_CHK:
12091 case BUILT_IN_VSNPRINTF_CHK:
12092 len = CALL_EXPR_ARG (exp, 1);
12093 size = CALL_EXPR_ARG (exp, 3);
12094 break;
12095 default:
12096 gcc_unreachable ();
12099 if (!len || !size)
12100 return;
12102 if (! host_integerp (size, 1) || integer_all_onesp (size))
12103 return;
12105 if (is_strlen)
12107 len = c_strlen (len, 1);
12108 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12109 return;
12111 else if (fcode == BUILT_IN_STRNCAT_CHK)
12113 tree src = CALL_EXPR_ARG (exp, 1);
12114 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12115 return;
12116 src = c_strlen (src, 1);
12117 if (! src || ! host_integerp (src, 1))
12119 warning (0, "%Kcall to %D might overflow destination buffer",
12120 exp, get_callee_fndecl (exp));
12121 return;
12123 else if (tree_int_cst_lt (src, size))
12124 return;
12126 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12127 return;
12129 warning (0, "%Kcall to %D will always overflow destination buffer",
12130 exp, get_callee_fndecl (exp));
12133 /* Emit warning if a buffer overflow is detected at compile time
12134 in __sprintf_chk/__vsprintf_chk calls. */
12136 static void
12137 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12139 tree dest, size, len, fmt, flag;
12140 const char *fmt_str;
12141 int nargs = call_expr_nargs (exp);
12143 /* Verify the required arguments in the original call. */
12145 if (nargs < 4)
12146 return;
12147 dest = CALL_EXPR_ARG (exp, 0);
12148 flag = CALL_EXPR_ARG (exp, 1);
12149 size = CALL_EXPR_ARG (exp, 2);
12150 fmt = CALL_EXPR_ARG (exp, 3);
12152 if (! host_integerp (size, 1) || integer_all_onesp (size))
12153 return;
12155 /* Check whether the format is a literal string constant. */
12156 fmt_str = c_getstr (fmt);
12157 if (fmt_str == NULL)
12158 return;
12160 if (!init_target_chars ())
12161 return;
12163 /* If the format doesn't contain % args or %%, we know its size. */
12164 if (strchr (fmt_str, target_percent) == 0)
12165 len = build_int_cstu (size_type_node, strlen (fmt_str));
12166 /* If the format is "%s" and first ... argument is a string literal,
12167 we know it too. */
12168 else if (fcode == BUILT_IN_SPRINTF_CHK
12169 && strcmp (fmt_str, target_percent_s) == 0)
12171 tree arg;
12173 if (nargs < 5)
12174 return;
12175 arg = CALL_EXPR_ARG (exp, 4);
12176 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12177 return;
12179 len = c_strlen (arg, 1);
12180 if (!len || ! host_integerp (len, 1))
12181 return;
12183 else
12184 return;
12186 if (! tree_int_cst_lt (len, size))
12188 warning (0, "%Kcall to %D will always overflow destination buffer",
12189 exp, get_callee_fndecl (exp));
12193 /* Emit warning if a free is called with address of a variable. */
12195 static void
12196 maybe_emit_free_warning (tree exp)
12198 tree arg = CALL_EXPR_ARG (exp, 0);
12200 STRIP_NOPS (arg);
12201 if (TREE_CODE (arg) != ADDR_EXPR)
12202 return;
12204 arg = get_base_address (TREE_OPERAND (arg, 0));
12205 if (arg == NULL || INDIRECT_REF_P (arg))
12206 return;
12208 if (SSA_VAR_P (arg))
12209 warning (0, "%Kattempt to free a non-heap object %qD", exp, arg);
12210 else
12211 warning (0, "%Kattempt to free a non-heap object", exp);
12214 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12215 if possible. */
12217 tree
12218 fold_builtin_object_size (tree ptr, tree ost)
12220 tree ret = NULL_TREE;
12221 int object_size_type;
12223 if (!validate_arg (ptr, POINTER_TYPE)
12224 || !validate_arg (ost, INTEGER_TYPE))
12225 return NULL_TREE;
12227 STRIP_NOPS (ost);
12229 if (TREE_CODE (ost) != INTEGER_CST
12230 || tree_int_cst_sgn (ost) < 0
12231 || compare_tree_int (ost, 3) > 0)
12232 return NULL_TREE;
12234 object_size_type = tree_low_cst (ost, 0);
12236 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12237 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12238 and (size_t) 0 for types 2 and 3. */
12239 if (TREE_SIDE_EFFECTS (ptr))
12240 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12242 if (TREE_CODE (ptr) == ADDR_EXPR)
12243 ret = build_int_cstu (size_type_node,
12244 compute_builtin_object_size (ptr, object_size_type));
12246 else if (TREE_CODE (ptr) == SSA_NAME)
12248 unsigned HOST_WIDE_INT bytes;
12250 /* If object size is not known yet, delay folding until
12251 later. Maybe subsequent passes will help determining
12252 it. */
12253 bytes = compute_builtin_object_size (ptr, object_size_type);
12254 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12255 ? -1 : 0))
12256 ret = build_int_cstu (size_type_node, bytes);
12259 if (ret)
12261 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12262 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12263 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12264 ret = NULL_TREE;
12267 return ret;
12270 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12271 DEST, SRC, LEN, and SIZE are the arguments to the call.
12272 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12273 code of the builtin. If MAXLEN is not NULL, it is maximum length
12274 passed as third argument. */
12276 tree
12277 fold_builtin_memory_chk (tree fndecl,
12278 tree dest, tree src, tree len, tree size,
12279 tree maxlen, bool ignore,
12280 enum built_in_function fcode)
12282 tree fn;
12284 if (!validate_arg (dest, POINTER_TYPE)
12285 || !validate_arg (src,
12286 (fcode == BUILT_IN_MEMSET_CHK
12287 ? INTEGER_TYPE : POINTER_TYPE))
12288 || !validate_arg (len, INTEGER_TYPE)
12289 || !validate_arg (size, INTEGER_TYPE))
12290 return NULL_TREE;
12292 /* If SRC and DEST are the same (and not volatile), return DEST
12293 (resp. DEST+LEN for __mempcpy_chk). */
12294 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12296 if (fcode != BUILT_IN_MEMPCPY_CHK)
12297 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12298 else
12300 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12301 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
12305 if (! host_integerp (size, 1))
12306 return NULL_TREE;
12308 if (! integer_all_onesp (size))
12310 if (! host_integerp (len, 1))
12312 /* If LEN is not constant, try MAXLEN too.
12313 For MAXLEN only allow optimizing into non-_ocs function
12314 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12315 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12317 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12319 /* (void) __mempcpy_chk () can be optimized into
12320 (void) __memcpy_chk (). */
12321 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12322 if (!fn)
12323 return NULL_TREE;
12325 return build_call_expr (fn, 4, dest, src, len, size);
12327 return NULL_TREE;
12330 else
12331 maxlen = len;
12333 if (tree_int_cst_lt (size, maxlen))
12334 return NULL_TREE;
12337 fn = NULL_TREE;
12338 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12339 mem{cpy,pcpy,move,set} is available. */
12340 switch (fcode)
12342 case BUILT_IN_MEMCPY_CHK:
12343 fn = built_in_decls[BUILT_IN_MEMCPY];
12344 break;
12345 case BUILT_IN_MEMPCPY_CHK:
12346 fn = built_in_decls[BUILT_IN_MEMPCPY];
12347 break;
12348 case BUILT_IN_MEMMOVE_CHK:
12349 fn = built_in_decls[BUILT_IN_MEMMOVE];
12350 break;
12351 case BUILT_IN_MEMSET_CHK:
12352 fn = built_in_decls[BUILT_IN_MEMSET];
12353 break;
12354 default:
12355 break;
12358 if (!fn)
12359 return NULL_TREE;
12361 return build_call_expr (fn, 3, dest, src, len);
12364 /* Fold a call to the __st[rp]cpy_chk builtin.
12365 DEST, SRC, and SIZE are the arguments to the call.
12366 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12367 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12368 strings passed as second argument. */
12370 tree
12371 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12372 tree maxlen, bool ignore,
12373 enum built_in_function fcode)
12375 tree len, fn;
12377 if (!validate_arg (dest, POINTER_TYPE)
12378 || !validate_arg (src, POINTER_TYPE)
12379 || !validate_arg (size, INTEGER_TYPE))
12380 return NULL_TREE;
12382 /* If SRC and DEST are the same (and not volatile), return DEST. */
12383 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12384 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12386 if (! host_integerp (size, 1))
12387 return NULL_TREE;
12389 if (! integer_all_onesp (size))
12391 len = c_strlen (src, 1);
12392 if (! len || ! host_integerp (len, 1))
12394 /* If LEN is not constant, try MAXLEN too.
12395 For MAXLEN only allow optimizing into non-_ocs function
12396 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12397 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12399 if (fcode == BUILT_IN_STPCPY_CHK)
12401 if (! ignore)
12402 return NULL_TREE;
12404 /* If return value of __stpcpy_chk is ignored,
12405 optimize into __strcpy_chk. */
12406 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12407 if (!fn)
12408 return NULL_TREE;
12410 return build_call_expr (fn, 3, dest, src, size);
12413 if (! len || TREE_SIDE_EFFECTS (len))
12414 return NULL_TREE;
12416 /* If c_strlen returned something, but not a constant,
12417 transform __strcpy_chk into __memcpy_chk. */
12418 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12419 if (!fn)
12420 return NULL_TREE;
12422 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12423 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12424 build_call_expr (fn, 4,
12425 dest, src, len, size));
12428 else
12429 maxlen = len;
12431 if (! tree_int_cst_lt (maxlen, size))
12432 return NULL_TREE;
12435 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12436 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12437 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12438 if (!fn)
12439 return NULL_TREE;
12441 return build_call_expr (fn, 2, dest, src);
12444 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12445 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12446 length passed as third argument. */
12448 tree
12449 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12450 tree maxlen)
12452 tree fn;
12454 if (!validate_arg (dest, POINTER_TYPE)
12455 || !validate_arg (src, POINTER_TYPE)
12456 || !validate_arg (len, INTEGER_TYPE)
12457 || !validate_arg (size, INTEGER_TYPE))
12458 return NULL_TREE;
12460 if (! host_integerp (size, 1))
12461 return NULL_TREE;
12463 if (! integer_all_onesp (size))
12465 if (! host_integerp (len, 1))
12467 /* If LEN is not constant, try MAXLEN too.
12468 For MAXLEN only allow optimizing into non-_ocs function
12469 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12470 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12471 return NULL_TREE;
12473 else
12474 maxlen = len;
12476 if (tree_int_cst_lt (size, maxlen))
12477 return NULL_TREE;
12480 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12481 fn = built_in_decls[BUILT_IN_STRNCPY];
12482 if (!fn)
12483 return NULL_TREE;
12485 return build_call_expr (fn, 3, dest, src, len);
12488 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12489 are the arguments to the call. */
12491 static tree
12492 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12494 tree fn;
12495 const char *p;
12497 if (!validate_arg (dest, POINTER_TYPE)
12498 || !validate_arg (src, POINTER_TYPE)
12499 || !validate_arg (size, INTEGER_TYPE))
12500 return NULL_TREE;
12502 p = c_getstr (src);
12503 /* If the SRC parameter is "", return DEST. */
12504 if (p && *p == '\0')
12505 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12507 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12508 return NULL_TREE;
12510 /* If __builtin_strcat_chk is used, assume strcat is available. */
12511 fn = built_in_decls[BUILT_IN_STRCAT];
12512 if (!fn)
12513 return NULL_TREE;
12515 return build_call_expr (fn, 2, dest, src);
12518 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12519 LEN, and SIZE. */
12521 static tree
12522 fold_builtin_strncat_chk (tree fndecl,
12523 tree dest, tree src, tree len, tree size)
12525 tree fn;
12526 const char *p;
12528 if (!validate_arg (dest, POINTER_TYPE)
12529 || !validate_arg (src, POINTER_TYPE)
12530 || !validate_arg (size, INTEGER_TYPE)
12531 || !validate_arg (size, INTEGER_TYPE))
12532 return NULL_TREE;
12534 p = c_getstr (src);
12535 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12536 if (p && *p == '\0')
12537 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12538 else if (integer_zerop (len))
12539 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12541 if (! host_integerp (size, 1))
12542 return NULL_TREE;
12544 if (! integer_all_onesp (size))
12546 tree src_len = c_strlen (src, 1);
12547 if (src_len
12548 && host_integerp (src_len, 1)
12549 && host_integerp (len, 1)
12550 && ! tree_int_cst_lt (len, src_len))
12552 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12553 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12554 if (!fn)
12555 return NULL_TREE;
12557 return build_call_expr (fn, 3, dest, src, size);
12559 return NULL_TREE;
12562 /* If __builtin_strncat_chk is used, assume strncat is available. */
12563 fn = built_in_decls[BUILT_IN_STRNCAT];
12564 if (!fn)
12565 return NULL_TREE;
12567 return build_call_expr (fn, 3, dest, src, len);
12570 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12571 a normal call should be emitted rather than expanding the function
12572 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12574 static tree
12575 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12577 tree dest, size, len, fn, fmt, flag;
12578 const char *fmt_str;
12579 int nargs = call_expr_nargs (exp);
12581 /* Verify the required arguments in the original call. */
12582 if (nargs < 4)
12583 return NULL_TREE;
12584 dest = CALL_EXPR_ARG (exp, 0);
12585 if (!validate_arg (dest, POINTER_TYPE))
12586 return NULL_TREE;
12587 flag = CALL_EXPR_ARG (exp, 1);
12588 if (!validate_arg (flag, INTEGER_TYPE))
12589 return NULL_TREE;
12590 size = CALL_EXPR_ARG (exp, 2);
12591 if (!validate_arg (size, INTEGER_TYPE))
12592 return NULL_TREE;
12593 fmt = CALL_EXPR_ARG (exp, 3);
12594 if (!validate_arg (fmt, POINTER_TYPE))
12595 return NULL_TREE;
12597 if (! host_integerp (size, 1))
12598 return NULL_TREE;
12600 len = NULL_TREE;
12602 if (!init_target_chars ())
12603 return NULL_TREE;
12605 /* Check whether the format is a literal string constant. */
12606 fmt_str = c_getstr (fmt);
12607 if (fmt_str != NULL)
12609 /* If the format doesn't contain % args or %%, we know the size. */
12610 if (strchr (fmt_str, target_percent) == 0)
12612 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12613 len = build_int_cstu (size_type_node, strlen (fmt_str));
12615 /* If the format is "%s" and first ... argument is a string literal,
12616 we know the size too. */
12617 else if (fcode == BUILT_IN_SPRINTF_CHK
12618 && strcmp (fmt_str, target_percent_s) == 0)
12620 tree arg;
12622 if (nargs == 5)
12624 arg = CALL_EXPR_ARG (exp, 4);
12625 if (validate_arg (arg, POINTER_TYPE))
12627 len = c_strlen (arg, 1);
12628 if (! len || ! host_integerp (len, 1))
12629 len = NULL_TREE;
12635 if (! integer_all_onesp (size))
12637 if (! len || ! tree_int_cst_lt (len, size))
12638 return NULL_TREE;
12641 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12642 or if format doesn't contain % chars or is "%s". */
12643 if (! integer_zerop (flag))
12645 if (fmt_str == NULL)
12646 return NULL_TREE;
12647 if (strchr (fmt_str, target_percent) != NULL
12648 && strcmp (fmt_str, target_percent_s))
12649 return NULL_TREE;
12652 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12653 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12654 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12655 if (!fn)
12656 return NULL_TREE;
12658 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12661 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12662 a normal call should be emitted rather than expanding the function
12663 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12664 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12665 passed as second argument. */
12667 tree
12668 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12669 enum built_in_function fcode)
12671 tree dest, size, len, fn, fmt, flag;
12672 const char *fmt_str;
12674 /* Verify the required arguments in the original call. */
12675 if (call_expr_nargs (exp) < 5)
12676 return NULL_TREE;
12677 dest = CALL_EXPR_ARG (exp, 0);
12678 if (!validate_arg (dest, POINTER_TYPE))
12679 return NULL_TREE;
12680 len = CALL_EXPR_ARG (exp, 1);
12681 if (!validate_arg (len, INTEGER_TYPE))
12682 return NULL_TREE;
12683 flag = CALL_EXPR_ARG (exp, 2);
12684 if (!validate_arg (flag, INTEGER_TYPE))
12685 return NULL_TREE;
12686 size = CALL_EXPR_ARG (exp, 3);
12687 if (!validate_arg (size, INTEGER_TYPE))
12688 return NULL_TREE;
12689 fmt = CALL_EXPR_ARG (exp, 4);
12690 if (!validate_arg (fmt, POINTER_TYPE))
12691 return NULL_TREE;
12693 if (! host_integerp (size, 1))
12694 return NULL_TREE;
12696 if (! integer_all_onesp (size))
12698 if (! host_integerp (len, 1))
12700 /* If LEN is not constant, try MAXLEN too.
12701 For MAXLEN only allow optimizing into non-_ocs function
12702 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12703 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12704 return NULL_TREE;
12706 else
12707 maxlen = len;
12709 if (tree_int_cst_lt (size, maxlen))
12710 return NULL_TREE;
12713 if (!init_target_chars ())
12714 return NULL_TREE;
12716 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12717 or if format doesn't contain % chars or is "%s". */
12718 if (! integer_zerop (flag))
12720 fmt_str = c_getstr (fmt);
12721 if (fmt_str == NULL)
12722 return NULL_TREE;
12723 if (strchr (fmt_str, target_percent) != NULL
12724 && strcmp (fmt_str, target_percent_s))
12725 return NULL_TREE;
12728 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12729 available. */
12730 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12731 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12732 if (!fn)
12733 return NULL_TREE;
12735 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12738 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12739 FMT and ARG are the arguments to the call; we don't fold cases with
12740 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12742 Return NULL_TREE if no simplification was possible, otherwise return the
12743 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12744 code of the function to be simplified. */
12746 static tree
12747 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12748 enum built_in_function fcode)
12750 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12751 const char *fmt_str = NULL;
12753 /* If the return value is used, don't do the transformation. */
12754 if (! ignore)
12755 return NULL_TREE;
12757 /* Verify the required arguments in the original call. */
12758 if (!validate_arg (fmt, POINTER_TYPE))
12759 return NULL_TREE;
12761 /* Check whether the format is a literal string constant. */
12762 fmt_str = c_getstr (fmt);
12763 if (fmt_str == NULL)
12764 return NULL_TREE;
12766 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12768 /* If we're using an unlocked function, assume the other
12769 unlocked functions exist explicitly. */
12770 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12771 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12773 else
12775 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12776 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12779 if (!init_target_chars ())
12780 return NULL_TREE;
12782 if (strcmp (fmt_str, target_percent_s) == 0
12783 || strchr (fmt_str, target_percent) == NULL)
12785 const char *str;
12787 if (strcmp (fmt_str, target_percent_s) == 0)
12789 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12790 return NULL_TREE;
12792 if (!arg || !validate_arg (arg, POINTER_TYPE))
12793 return NULL_TREE;
12795 str = c_getstr (arg);
12796 if (str == NULL)
12797 return NULL_TREE;
12799 else
12801 /* The format specifier doesn't contain any '%' characters. */
12802 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12803 && arg)
12804 return NULL_TREE;
12805 str = fmt_str;
12808 /* If the string was "", printf does nothing. */
12809 if (str[0] == '\0')
12810 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12812 /* If the string has length of 1, call putchar. */
12813 if (str[1] == '\0')
12815 /* Given printf("c"), (where c is any one character,)
12816 convert "c"[0] to an int and pass that to the replacement
12817 function. */
12818 newarg = build_int_cst (NULL_TREE, str[0]);
12819 if (fn_putchar)
12820 call = build_call_expr (fn_putchar, 1, newarg);
12822 else
12824 /* If the string was "string\n", call puts("string"). */
12825 size_t len = strlen (str);
12826 if ((unsigned char)str[len - 1] == target_newline)
12828 /* Create a NUL-terminated string that's one char shorter
12829 than the original, stripping off the trailing '\n'. */
12830 char *newstr = XALLOCAVEC (char, len);
12831 memcpy (newstr, str, len - 1);
12832 newstr[len - 1] = 0;
12834 newarg = build_string_literal (len, newstr);
12835 if (fn_puts)
12836 call = build_call_expr (fn_puts, 1, newarg);
12838 else
12839 /* We'd like to arrange to call fputs(string,stdout) here,
12840 but we need stdout and don't have a way to get it yet. */
12841 return NULL_TREE;
12845 /* The other optimizations can be done only on the non-va_list variants. */
12846 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12847 return NULL_TREE;
12849 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12850 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12852 if (!arg || !validate_arg (arg, POINTER_TYPE))
12853 return NULL_TREE;
12854 if (fn_puts)
12855 call = build_call_expr (fn_puts, 1, arg);
12858 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12859 else if (strcmp (fmt_str, target_percent_c) == 0)
12861 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12862 return NULL_TREE;
12863 if (fn_putchar)
12864 call = build_call_expr (fn_putchar, 1, arg);
12867 if (!call)
12868 return NULL_TREE;
12870 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12873 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12874 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12875 more than 3 arguments, and ARG may be null in the 2-argument case.
12877 Return NULL_TREE if no simplification was possible, otherwise return the
12878 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12879 code of the function to be simplified. */
12881 static tree
12882 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12883 enum built_in_function fcode)
12885 tree fn_fputc, fn_fputs, call = NULL_TREE;
12886 const char *fmt_str = NULL;
12888 /* If the return value is used, don't do the transformation. */
12889 if (! ignore)
12890 return NULL_TREE;
12892 /* Verify the required arguments in the original call. */
12893 if (!validate_arg (fp, POINTER_TYPE))
12894 return NULL_TREE;
12895 if (!validate_arg (fmt, POINTER_TYPE))
12896 return NULL_TREE;
12898 /* Check whether the format is a literal string constant. */
12899 fmt_str = c_getstr (fmt);
12900 if (fmt_str == NULL)
12901 return NULL_TREE;
12903 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12905 /* If we're using an unlocked function, assume the other
12906 unlocked functions exist explicitly. */
12907 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12908 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12910 else
12912 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12913 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12916 if (!init_target_chars ())
12917 return NULL_TREE;
12919 /* If the format doesn't contain % args or %%, use strcpy. */
12920 if (strchr (fmt_str, target_percent) == NULL)
12922 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12923 && arg)
12924 return NULL_TREE;
12926 /* If the format specifier was "", fprintf does nothing. */
12927 if (fmt_str[0] == '\0')
12929 /* If FP has side-effects, just wait until gimplification is
12930 done. */
12931 if (TREE_SIDE_EFFECTS (fp))
12932 return NULL_TREE;
12934 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12937 /* When "string" doesn't contain %, replace all cases of
12938 fprintf (fp, string) with fputs (string, fp). The fputs
12939 builtin will take care of special cases like length == 1. */
12940 if (fn_fputs)
12941 call = build_call_expr (fn_fputs, 2, fmt, fp);
12944 /* The other optimizations can be done only on the non-va_list variants. */
12945 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12946 return NULL_TREE;
12948 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12949 else if (strcmp (fmt_str, target_percent_s) == 0)
12951 if (!arg || !validate_arg (arg, POINTER_TYPE))
12952 return NULL_TREE;
12953 if (fn_fputs)
12954 call = build_call_expr (fn_fputs, 2, arg, fp);
12957 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12958 else if (strcmp (fmt_str, target_percent_c) == 0)
12960 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12961 return NULL_TREE;
12962 if (fn_fputc)
12963 call = build_call_expr (fn_fputc, 2, arg, fp);
12966 if (!call)
12967 return NULL_TREE;
12968 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12971 /* Initialize format string characters in the target charset. */
12973 static bool
12974 init_target_chars (void)
12976 static bool init;
12977 if (!init)
12979 target_newline = lang_hooks.to_target_charset ('\n');
12980 target_percent = lang_hooks.to_target_charset ('%');
12981 target_c = lang_hooks.to_target_charset ('c');
12982 target_s = lang_hooks.to_target_charset ('s');
12983 if (target_newline == 0 || target_percent == 0 || target_c == 0
12984 || target_s == 0)
12985 return false;
12987 target_percent_c[0] = target_percent;
12988 target_percent_c[1] = target_c;
12989 target_percent_c[2] = '\0';
12991 target_percent_s[0] = target_percent;
12992 target_percent_s[1] = target_s;
12993 target_percent_s[2] = '\0';
12995 target_percent_s_newline[0] = target_percent;
12996 target_percent_s_newline[1] = target_s;
12997 target_percent_s_newline[2] = target_newline;
12998 target_percent_s_newline[3] = '\0';
13000 init = true;
13002 return true;
13005 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13006 and no overflow/underflow occurred. INEXACT is true if M was not
13007 exactly calculated. TYPE is the tree type for the result. This
13008 function assumes that you cleared the MPFR flags and then
13009 calculated M to see if anything subsequently set a flag prior to
13010 entering this function. Return NULL_TREE if any checks fail. */
13012 static tree
13013 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13015 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13016 overflow/underflow occurred. If -frounding-math, proceed iff the
13017 result of calling FUNC was exact. */
13018 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13019 && (!flag_rounding_math || !inexact))
13021 REAL_VALUE_TYPE rr;
13023 real_from_mpfr (&rr, m, type, GMP_RNDN);
13024 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13025 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13026 but the mpft_t is not, then we underflowed in the
13027 conversion. */
13028 if (real_isfinite (&rr)
13029 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13031 REAL_VALUE_TYPE rmode;
13033 real_convert (&rmode, TYPE_MODE (type), &rr);
13034 /* Proceed iff the specified mode can hold the value. */
13035 if (real_identical (&rmode, &rr))
13036 return build_real (type, rmode);
13039 return NULL_TREE;
13042 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13043 FUNC on it and return the resulting value as a tree with type TYPE.
13044 If MIN and/or MAX are not NULL, then the supplied ARG must be
13045 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13046 acceptable values, otherwise they are not. The mpfr precision is
13047 set to the precision of TYPE. We assume that function FUNC returns
13048 zero if the result could be calculated exactly within the requested
13049 precision. */
13051 static tree
13052 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13053 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13054 bool inclusive)
13056 tree result = NULL_TREE;
13058 STRIP_NOPS (arg);
13060 /* To proceed, MPFR must exactly represent the target floating point
13061 format, which only happens when the target base equals two. */
13062 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13063 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13065 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13067 if (real_isfinite (ra)
13068 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13069 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13071 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13072 const int prec = fmt->p;
13073 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13074 int inexact;
13075 mpfr_t m;
13077 mpfr_init2 (m, prec);
13078 mpfr_from_real (m, ra, GMP_RNDN);
13079 mpfr_clear_flags ();
13080 inexact = func (m, m, rnd);
13081 result = do_mpfr_ckconv (m, type, inexact);
13082 mpfr_clear (m);
13086 return result;
13089 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13090 FUNC on it and return the resulting value as a tree with type TYPE.
13091 The mpfr precision is set to the precision of TYPE. We assume that
13092 function FUNC returns zero if the result could be calculated
13093 exactly within the requested precision. */
13095 static tree
13096 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13097 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13099 tree result = NULL_TREE;
13101 STRIP_NOPS (arg1);
13102 STRIP_NOPS (arg2);
13104 /* To proceed, MPFR must exactly represent the target floating point
13105 format, which only happens when the target base equals two. */
13106 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13107 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13108 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13110 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13111 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13113 if (real_isfinite (ra1) && real_isfinite (ra2))
13115 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13116 const int prec = fmt->p;
13117 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13118 int inexact;
13119 mpfr_t m1, m2;
13121 mpfr_inits2 (prec, m1, m2, NULL);
13122 mpfr_from_real (m1, ra1, GMP_RNDN);
13123 mpfr_from_real (m2, ra2, GMP_RNDN);
13124 mpfr_clear_flags ();
13125 inexact = func (m1, m1, m2, rnd);
13126 result = do_mpfr_ckconv (m1, type, inexact);
13127 mpfr_clears (m1, m2, NULL);
13131 return result;
13134 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13135 FUNC on it and return the resulting value as a tree with type TYPE.
13136 The mpfr precision is set to the precision of TYPE. We assume that
13137 function FUNC returns zero if the result could be calculated
13138 exactly within the requested precision. */
13140 static tree
13141 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13142 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13144 tree result = NULL_TREE;
13146 STRIP_NOPS (arg1);
13147 STRIP_NOPS (arg2);
13148 STRIP_NOPS (arg3);
13150 /* To proceed, MPFR must exactly represent the target floating point
13151 format, which only happens when the target base equals two. */
13152 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13153 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13154 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13155 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13157 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13158 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13159 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13161 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13163 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13164 const int prec = fmt->p;
13165 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13166 int inexact;
13167 mpfr_t m1, m2, m3;
13169 mpfr_inits2 (prec, m1, m2, m3, NULL);
13170 mpfr_from_real (m1, ra1, GMP_RNDN);
13171 mpfr_from_real (m2, ra2, GMP_RNDN);
13172 mpfr_from_real (m3, ra3, GMP_RNDN);
13173 mpfr_clear_flags ();
13174 inexact = func (m1, m1, m2, m3, rnd);
13175 result = do_mpfr_ckconv (m1, type, inexact);
13176 mpfr_clears (m1, m2, m3, NULL);
13180 return result;
13183 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13184 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13185 If ARG_SINP and ARG_COSP are NULL then the result is returned
13186 as a complex value.
13187 The type is taken from the type of ARG and is used for setting the
13188 precision of the calculation and results. */
13190 static tree
13191 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13193 tree const type = TREE_TYPE (arg);
13194 tree result = NULL_TREE;
13196 STRIP_NOPS (arg);
13198 /* To proceed, MPFR must exactly represent the target floating point
13199 format, which only happens when the target base equals two. */
13200 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13201 && TREE_CODE (arg) == REAL_CST
13202 && !TREE_OVERFLOW (arg))
13204 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13206 if (real_isfinite (ra))
13208 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13209 const int prec = fmt->p;
13210 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13211 tree result_s, result_c;
13212 int inexact;
13213 mpfr_t m, ms, mc;
13215 mpfr_inits2 (prec, m, ms, mc, NULL);
13216 mpfr_from_real (m, ra, GMP_RNDN);
13217 mpfr_clear_flags ();
13218 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13219 result_s = do_mpfr_ckconv (ms, type, inexact);
13220 result_c = do_mpfr_ckconv (mc, type, inexact);
13221 mpfr_clears (m, ms, mc, NULL);
13222 if (result_s && result_c)
13224 /* If we are to return in a complex value do so. */
13225 if (!arg_sinp && !arg_cosp)
13226 return build_complex (build_complex_type (type),
13227 result_c, result_s);
13229 /* Dereference the sin/cos pointer arguments. */
13230 arg_sinp = build_fold_indirect_ref (arg_sinp);
13231 arg_cosp = build_fold_indirect_ref (arg_cosp);
13232 /* Proceed if valid pointer type were passed in. */
13233 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13234 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13236 /* Set the values. */
13237 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13238 result_s);
13239 TREE_SIDE_EFFECTS (result_s) = 1;
13240 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13241 result_c);
13242 TREE_SIDE_EFFECTS (result_c) = 1;
13243 /* Combine the assignments into a compound expr. */
13244 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13245 result_s, result_c));
13250 return result;
13253 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13254 two-argument mpfr order N Bessel function FUNC on them and return
13255 the resulting value as a tree with type TYPE. The mpfr precision
13256 is set to the precision of TYPE. We assume that function FUNC
13257 returns zero if the result could be calculated exactly within the
13258 requested precision. */
13259 static tree
13260 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13261 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13262 const REAL_VALUE_TYPE *min, bool inclusive)
13264 tree result = NULL_TREE;
13266 STRIP_NOPS (arg1);
13267 STRIP_NOPS (arg2);
13269 /* To proceed, MPFR must exactly represent the target floating point
13270 format, which only happens when the target base equals two. */
13271 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13272 && host_integerp (arg1, 0)
13273 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13275 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13276 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13278 if (n == (long)n
13279 && real_isfinite (ra)
13280 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13282 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13283 const int prec = fmt->p;
13284 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13285 int inexact;
13286 mpfr_t m;
13288 mpfr_init2 (m, prec);
13289 mpfr_from_real (m, ra, GMP_RNDN);
13290 mpfr_clear_flags ();
13291 inexact = func (m, n, m, rnd);
13292 result = do_mpfr_ckconv (m, type, inexact);
13293 mpfr_clear (m);
13297 return result;
13300 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13301 the pointer *(ARG_QUO) and return the result. The type is taken
13302 from the type of ARG0 and is used for setting the precision of the
13303 calculation and results. */
13305 static tree
13306 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13308 tree const type = TREE_TYPE (arg0);
13309 tree result = NULL_TREE;
13311 STRIP_NOPS (arg0);
13312 STRIP_NOPS (arg1);
13314 /* To proceed, MPFR must exactly represent the target floating point
13315 format, which only happens when the target base equals two. */
13316 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13317 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13318 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13320 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13321 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13323 if (real_isfinite (ra0) && real_isfinite (ra1))
13325 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13326 const int prec = fmt->p;
13327 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13328 tree result_rem;
13329 long integer_quo;
13330 mpfr_t m0, m1;
13332 mpfr_inits2 (prec, m0, m1, NULL);
13333 mpfr_from_real (m0, ra0, GMP_RNDN);
13334 mpfr_from_real (m1, ra1, GMP_RNDN);
13335 mpfr_clear_flags ();
13336 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13337 /* Remquo is independent of the rounding mode, so pass
13338 inexact=0 to do_mpfr_ckconv(). */
13339 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13340 mpfr_clears (m0, m1, NULL);
13341 if (result_rem)
13343 /* MPFR calculates quo in the host's long so it may
13344 return more bits in quo than the target int can hold
13345 if sizeof(host long) > sizeof(target int). This can
13346 happen even for native compilers in LP64 mode. In
13347 these cases, modulo the quo value with the largest
13348 number that the target int can hold while leaving one
13349 bit for the sign. */
13350 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13351 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13353 /* Dereference the quo pointer argument. */
13354 arg_quo = build_fold_indirect_ref (arg_quo);
13355 /* Proceed iff a valid pointer type was passed in. */
13356 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13358 /* Set the value. */
13359 tree result_quo = fold_build2 (MODIFY_EXPR,
13360 TREE_TYPE (arg_quo), arg_quo,
13361 build_int_cst (NULL, integer_quo));
13362 TREE_SIDE_EFFECTS (result_quo) = 1;
13363 /* Combine the quo assignment with the rem. */
13364 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13365 result_quo, result_rem));
13370 return result;
13373 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13374 resulting value as a tree with type TYPE. The mpfr precision is
13375 set to the precision of TYPE. We assume that this mpfr function
13376 returns zero if the result could be calculated exactly within the
13377 requested precision. In addition, the integer pointer represented
13378 by ARG_SG will be dereferenced and set to the appropriate signgam
13379 (-1,1) value. */
13381 static tree
13382 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13384 tree result = NULL_TREE;
13386 STRIP_NOPS (arg);
13388 /* To proceed, MPFR must exactly represent the target floating point
13389 format, which only happens when the target base equals two. Also
13390 verify ARG is a constant and that ARG_SG is an int pointer. */
13391 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13392 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13393 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13394 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13396 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13398 /* In addition to NaN and Inf, the argument cannot be zero or a
13399 negative integer. */
13400 if (real_isfinite (ra)
13401 && ra->cl != rvc_zero
13402 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13404 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13405 const int prec = fmt->p;
13406 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13407 int inexact, sg;
13408 mpfr_t m;
13409 tree result_lg;
13411 mpfr_init2 (m, prec);
13412 mpfr_from_real (m, ra, GMP_RNDN);
13413 mpfr_clear_flags ();
13414 inexact = mpfr_lgamma (m, &sg, m, rnd);
13415 result_lg = do_mpfr_ckconv (m, type, inexact);
13416 mpfr_clear (m);
13417 if (result_lg)
13419 tree result_sg;
13421 /* Dereference the arg_sg pointer argument. */
13422 arg_sg = build_fold_indirect_ref (arg_sg);
13423 /* Assign the signgam value into *arg_sg. */
13424 result_sg = fold_build2 (MODIFY_EXPR,
13425 TREE_TYPE (arg_sg), arg_sg,
13426 build_int_cst (NULL, sg));
13427 TREE_SIDE_EFFECTS (result_sg) = 1;
13428 /* Combine the signgam assignment with the lgamma result. */
13429 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13430 result_sg, result_lg));
13435 return result;
13438 /* FIXME tuples.
13439 The functions below provide an alternate interface for folding
13440 builtin function calls presented as GIMPLE_CALL statements rather
13441 than as CALL_EXPRs. The folded result is still expressed as a
13442 tree. There is too much code duplication in the handling of
13443 varargs functions, and a more intrusive re-factoring would permit
13444 better sharing of code between the tree and statement-based
13445 versions of these functions. */
13447 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13448 along with N new arguments specified as the "..." parameters. SKIP
13449 is the number of arguments in STMT to be omitted. This function is used
13450 to do varargs-to-varargs transformations. */
13452 static tree
13453 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13455 int oldnargs = gimple_call_num_args (stmt);
13456 int nargs = oldnargs - skip + n;
13457 tree fntype = TREE_TYPE (fndecl);
13458 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13459 tree *buffer;
13460 int i, j;
13461 va_list ap;
13463 buffer = XALLOCAVEC (tree, nargs);
13464 va_start (ap, n);
13465 for (i = 0; i < n; i++)
13466 buffer[i] = va_arg (ap, tree);
13467 va_end (ap);
13468 for (j = skip; j < oldnargs; j++, i++)
13469 buffer[i] = gimple_call_arg (stmt, j);
13471 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
13474 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13475 a normal call should be emitted rather than expanding the function
13476 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13478 static tree
13479 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13481 tree dest, size, len, fn, fmt, flag;
13482 const char *fmt_str;
13483 int nargs = gimple_call_num_args (stmt);
13485 /* Verify the required arguments in the original call. */
13486 if (nargs < 4)
13487 return NULL_TREE;
13488 dest = gimple_call_arg (stmt, 0);
13489 if (!validate_arg (dest, POINTER_TYPE))
13490 return NULL_TREE;
13491 flag = gimple_call_arg (stmt, 1);
13492 if (!validate_arg (flag, INTEGER_TYPE))
13493 return NULL_TREE;
13494 size = gimple_call_arg (stmt, 2);
13495 if (!validate_arg (size, INTEGER_TYPE))
13496 return NULL_TREE;
13497 fmt = gimple_call_arg (stmt, 3);
13498 if (!validate_arg (fmt, POINTER_TYPE))
13499 return NULL_TREE;
13501 if (! host_integerp (size, 1))
13502 return NULL_TREE;
13504 len = NULL_TREE;
13506 if (!init_target_chars ())
13507 return NULL_TREE;
13509 /* Check whether the format is a literal string constant. */
13510 fmt_str = c_getstr (fmt);
13511 if (fmt_str != NULL)
13513 /* If the format doesn't contain % args or %%, we know the size. */
13514 if (strchr (fmt_str, target_percent) == 0)
13516 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13517 len = build_int_cstu (size_type_node, strlen (fmt_str));
13519 /* If the format is "%s" and first ... argument is a string literal,
13520 we know the size too. */
13521 else if (fcode == BUILT_IN_SPRINTF_CHK
13522 && strcmp (fmt_str, target_percent_s) == 0)
13524 tree arg;
13526 if (nargs == 5)
13528 arg = gimple_call_arg (stmt, 4);
13529 if (validate_arg (arg, POINTER_TYPE))
13531 len = c_strlen (arg, 1);
13532 if (! len || ! host_integerp (len, 1))
13533 len = NULL_TREE;
13539 if (! integer_all_onesp (size))
13541 if (! len || ! tree_int_cst_lt (len, size))
13542 return NULL_TREE;
13545 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13546 or if format doesn't contain % chars or is "%s". */
13547 if (! integer_zerop (flag))
13549 if (fmt_str == NULL)
13550 return NULL_TREE;
13551 if (strchr (fmt_str, target_percent) != NULL
13552 && strcmp (fmt_str, target_percent_s))
13553 return NULL_TREE;
13556 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13557 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13558 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13559 if (!fn)
13560 return NULL_TREE;
13562 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13565 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13566 a normal call should be emitted rather than expanding the function
13567 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13568 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13569 passed as second argument. */
13571 tree
13572 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13573 enum built_in_function fcode)
13575 tree dest, size, len, fn, fmt, flag;
13576 const char *fmt_str;
13578 /* Verify the required arguments in the original call. */
13579 if (gimple_call_num_args (stmt) < 5)
13580 return NULL_TREE;
13581 dest = gimple_call_arg (stmt, 0);
13582 if (!validate_arg (dest, POINTER_TYPE))
13583 return NULL_TREE;
13584 len = gimple_call_arg (stmt, 1);
13585 if (!validate_arg (len, INTEGER_TYPE))
13586 return NULL_TREE;
13587 flag = gimple_call_arg (stmt, 2);
13588 if (!validate_arg (flag, INTEGER_TYPE))
13589 return NULL_TREE;
13590 size = gimple_call_arg (stmt, 3);
13591 if (!validate_arg (size, INTEGER_TYPE))
13592 return NULL_TREE;
13593 fmt = gimple_call_arg (stmt, 4);
13594 if (!validate_arg (fmt, POINTER_TYPE))
13595 return NULL_TREE;
13597 if (! host_integerp (size, 1))
13598 return NULL_TREE;
13600 if (! integer_all_onesp (size))
13602 if (! host_integerp (len, 1))
13604 /* If LEN is not constant, try MAXLEN too.
13605 For MAXLEN only allow optimizing into non-_ocs function
13606 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13607 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13608 return NULL_TREE;
13610 else
13611 maxlen = len;
13613 if (tree_int_cst_lt (size, maxlen))
13614 return NULL_TREE;
13617 if (!init_target_chars ())
13618 return NULL_TREE;
13620 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13621 or if format doesn't contain % chars or is "%s". */
13622 if (! integer_zerop (flag))
13624 fmt_str = c_getstr (fmt);
13625 if (fmt_str == NULL)
13626 return NULL_TREE;
13627 if (strchr (fmt_str, target_percent) != NULL
13628 && strcmp (fmt_str, target_percent_s))
13629 return NULL_TREE;
13632 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13633 available. */
13634 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13635 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13636 if (!fn)
13637 return NULL_TREE;
13639 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13642 /* Builtins with folding operations that operate on "..." arguments
13643 need special handling; we need to store the arguments in a convenient
13644 data structure before attempting any folding. Fortunately there are
13645 only a few builtins that fall into this category. FNDECL is the
13646 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13647 result of the function call is ignored. */
13649 static tree
13650 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13652 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13653 tree ret = NULL_TREE;
13655 switch (fcode)
13657 case BUILT_IN_SPRINTF_CHK:
13658 case BUILT_IN_VSPRINTF_CHK:
13659 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13660 break;
13662 case BUILT_IN_SNPRINTF_CHK:
13663 case BUILT_IN_VSNPRINTF_CHK:
13664 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13666 default:
13667 break;
13669 if (ret)
13671 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13672 TREE_NO_WARNING (ret) = 1;
13673 return ret;
13675 return NULL_TREE;
13678 /* A wrapper function for builtin folding that prevents warnings for
13679 "statement without effect" and the like, caused by removing the
13680 call node earlier than the warning is generated. */
13682 tree
13683 fold_call_stmt (gimple stmt, bool ignore)
13685 tree ret = NULL_TREE;
13686 tree fndecl = gimple_call_fndecl (stmt);
13687 if (fndecl
13688 && TREE_CODE (fndecl) == FUNCTION_DECL
13689 && DECL_BUILT_IN (fndecl)
13690 && !gimple_call_va_arg_pack_p (stmt))
13692 int nargs = gimple_call_num_args (stmt);
13694 if (avoid_folding_inline_builtin (fndecl))
13695 return NULL_TREE;
13696 /* FIXME: Don't use a list in this interface. */
13697 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13699 tree arglist = NULL_TREE;
13700 int i;
13701 for (i = nargs - 1; i >= 0; i--)
13702 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13703 return targetm.fold_builtin (fndecl, arglist, ignore);
13705 else
13707 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13709 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13710 int i;
13711 for (i = 0; i < nargs; i++)
13712 args[i] = gimple_call_arg (stmt, i);
13713 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13715 if (!ret)
13716 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13717 if (ret)
13719 /* Propagate location information from original call to
13720 expansion of builtin. Otherwise things like
13721 maybe_emit_chk_warning, that operate on the expansion
13722 of a builtin, will use the wrong location information. */
13723 if (gimple_has_location (stmt))
13725 tree realret = ret;
13726 if (TREE_CODE (ret) == NOP_EXPR)
13727 realret = TREE_OPERAND (ret, 0);
13728 if (CAN_HAVE_LOCATION_P (realret)
13729 && !EXPR_HAS_LOCATION (realret))
13730 SET_EXPR_LOCATION (realret, gimple_location (stmt));
13731 return realret;
13733 return ret;
13737 return NULL_TREE;