1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
62 /* Define the names of the builtin function types and codes. */
63 const char *const built_in_class_names[4]
64 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
67 const char * built_in_names[(int) END_BUILTINS] =
68 {
69 #include "builtins.def"
70 };
71 #undef DEF_BUILTIN
73 /* Set up an array of _DECL trees, making sure each element is
74 initialized to NULL_TREE. */
75 tree built_in_decls[(int) END_BUILTINS];
76 /* Declarations used when constructing the builtin implicitly in the compiler.
77 It may be NULL_TREE when this is invalid (for instance the runtime is not
78 required to implement the function call in all cases). */
79 tree implicit_built_in_decls[(int) END_BUILTINS];
81 static const char *c_getstr (tree);
82 static rtx c_readstr (const char *, enum machine_mode);
83 static int target_char_cast (tree, char *);
84 static rtx get_memory_rtx (tree, tree);
85 static int apply_args_size (void);
86 static int apply_result_size (void);
87 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
88 static rtx result_vector (int, rtx);
89 #endif
90 static void expand_builtin_update_setjmp_buf (rtx);
91 static void expand_builtin_prefetch (tree);
92 static rtx expand_builtin_apply_args (void);
93 static rtx expand_builtin_apply_args_1 (void);
94 static rtx expand_builtin_apply (rtx, rtx, rtx);
95 static void expand_builtin_return (rtx);
96 static enum type_class type_to_class (tree);
97 static rtx expand_builtin_classify_type (tree);
98 static void expand_errno_check (tree, rtx);
99 static rtx expand_builtin_mathfn (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_args_info (tree);
108 static rtx expand_builtin_next_arg (void);
109 static rtx expand_builtin_va_start (tree);
110 static rtx expand_builtin_va_end (tree);
111 static rtx expand_builtin_va_copy (tree);
112 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
124 enum machine_mode, int);
125 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
126 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_bcopy (tree, int);
129 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
130 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
131 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
133 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
134 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
136 static rtx expand_builtin_bzero (tree);
137 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
142 static rtx expand_builtin_alloca (tree, rtx);
143 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
144 static rtx expand_builtin_frame_address (tree, tree);
145 static rtx expand_builtin_fputs (tree, rtx, bool);
146 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
147 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
148 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
149 static tree stabilize_va_list (tree, int);
150 static rtx expand_builtin_expect (tree, rtx);
151 static tree fold_builtin_constant_p (tree);
152 static tree fold_builtin_expect (tree, tree);
153 static tree fold_builtin_classify_type (tree);
154 static tree fold_builtin_strlen (tree);
155 static tree fold_builtin_inf (tree, int);
156 static tree fold_builtin_nan (tree, tree, int);
157 static tree rewrite_call_expr (tree, int, tree, int, ...);
158 static bool validate_arg (const_tree, enum tree_code code);
159 static bool integer_valued_real_p (tree);
160 static tree fold_trunc_transparent_mathfn (tree, tree);
161 static bool readonly_data_expr (tree);
162 static rtx expand_builtin_fabs (tree, rtx, rtx);
163 static rtx expand_builtin_signbit (tree, rtx);
164 static tree fold_builtin_sqrt (tree, tree);
165 static tree fold_builtin_cbrt (tree, tree);
166 static tree fold_builtin_pow (tree, tree, tree, tree);
167 static tree fold_builtin_powi (tree, tree, tree, tree);
168 static tree fold_builtin_cos (tree, tree, tree);
169 static tree fold_builtin_cosh (tree, tree, tree);
170 static tree fold_builtin_tan (tree, tree);
171 static tree fold_builtin_trunc (tree, tree);
172 static tree fold_builtin_floor (tree, tree);
173 static tree fold_builtin_ceil (tree, tree);
174 static tree fold_builtin_round (tree, tree);
175 static tree fold_builtin_int_roundingfn (tree, tree);
176 static tree fold_builtin_bitop (tree, tree);
177 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
178 static tree fold_builtin_strchr (tree, tree, tree);
179 static tree fold_builtin_memchr (tree, tree, tree, tree);
180 static tree fold_builtin_memcmp (tree, tree, tree);
181 static tree fold_builtin_strcmp (tree, tree);
182 static tree fold_builtin_strncmp (tree, tree, tree);
183 static tree fold_builtin_signbit (tree, tree);
184 static tree fold_builtin_copysign (tree, tree, tree, tree);
185 static tree fold_builtin_isascii (tree);
186 static tree fold_builtin_toascii (tree);
187 static tree fold_builtin_isdigit (tree);
188 static tree fold_builtin_fabs (tree, tree);
189 static tree fold_builtin_abs (tree, tree);
190 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
191 enum tree_code);
192 static tree fold_builtin_n (tree, tree *, int, bool);
193 static tree fold_builtin_0 (tree, bool);
194 static tree fold_builtin_1 (tree, tree, bool);
195 static tree fold_builtin_2 (tree, tree, tree, bool);
196 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
197 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
198 static tree fold_builtin_varargs (tree, tree, bool);
200 static tree fold_builtin_strpbrk (tree, tree, tree);
201 static tree fold_builtin_strstr (tree, tree, tree);
202 static tree fold_builtin_strrchr (tree, tree, tree);
203 static tree fold_builtin_strcat (tree, tree);
204 static tree fold_builtin_strncat (tree, tree, tree);
205 static tree fold_builtin_strspn (tree, tree);
206 static tree fold_builtin_strcspn (tree, tree);
207 static tree fold_builtin_sprintf (tree, tree, tree, int);
209 static rtx expand_builtin_object_size (tree);
210 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
211 enum built_in_function);
212 static void maybe_emit_chk_warning (tree, enum built_in_function);
213 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
214 static void maybe_emit_free_warning (tree);
215 static tree fold_builtin_object_size (tree, tree);
216 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
217 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
218 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
219 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
220 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
221 enum built_in_function);
222 static bool init_target_chars (void);
224 static unsigned HOST_WIDE_INT target_newline;
225 static unsigned HOST_WIDE_INT target_percent;
226 static unsigned HOST_WIDE_INT target_c;
227 static unsigned HOST_WIDE_INT target_s;
228 static char target_percent_c[3];
229 static char target_percent_s[3];
230 static char target_percent_s_newline[4];
231 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
232 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
233 static tree do_mpfr_arg2 (tree, tree, tree,
234 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
235 static tree do_mpfr_arg3 (tree, tree, tree, tree,
236 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
237 static tree do_mpfr_sincos (tree, tree, tree);
238 static tree do_mpfr_bessel_n (tree, tree, tree,
239 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
240 const REAL_VALUE_TYPE *, bool);
241 static tree do_mpfr_remquo (tree, tree, tree);
242 static tree do_mpfr_lgamma_r (tree, tree, tree);
244 /* Return true if NODE should be considered for inline expansion regardless
245 of the optimization level.  This is the case whenever a function is invoked
246 with its "internal" name, which normally contains the prefix "__builtin". */
248 static bool called_as_built_in (tree node)
250 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
251 if (strncmp (name, "__builtin_", 10) == 0)
252 return true;
253 if (strncmp (name, "__sync_", 7) == 0)
254 return true;
255 return false;
258 /* Return the alignment in bits of EXP, an object.
259 Don't return more than MAX_ALIGN no matter what.  ALIGN is the initial
260 guessed alignment, e.g. from type alignment. */
262 unsigned int
263 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
265 unsigned int inner;
267 inner = max_align;
268 if (handled_component_p (exp))
270 HOST_WIDE_INT bitsize, bitpos;
271 tree offset;
272 enum machine_mode mode;
273 int unsignedp, volatilep;
275 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
276 &mode, &unsignedp, &volatilep, true);
277 if (bitpos)
278 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
279 while (offset)
281 tree next_offset;
283 if (TREE_CODE (offset) == PLUS_EXPR)
285 next_offset = TREE_OPERAND (offset, 0);
286 offset = TREE_OPERAND (offset, 1);
288 else
289 next_offset = NULL;
290 if (host_integerp (offset, 1))
292 /* Any overflow in calculating offset_bits won't change
293 the alignment. */
294 unsigned offset_bits
295 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
297 if (offset_bits)
298 inner = MIN (inner, (offset_bits & -offset_bits));
300 else if (TREE_CODE (offset) == MULT_EXPR
301 && host_integerp (TREE_OPERAND (offset, 1), 1))
303 /* Any overflow in calculating offset_factor won't change
304 the alignment. */
305 unsigned offset_factor
306 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
307 * BITS_PER_UNIT);
309 if (offset_factor)
310 inner = MIN (inner, (offset_factor & -offset_factor));
312 else
314 inner = MIN (inner, BITS_PER_UNIT);
315 break;
317 offset = next_offset;
320 if (DECL_P (exp))
321 align = MIN (inner, DECL_ALIGN (exp));
322 #ifdef CONSTANT_ALIGNMENT
323 else if (CONSTANT_CLASS_P (exp))
324 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
325 #endif
326 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
327 || TREE_CODE (exp) == INDIRECT_REF)
328 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
329 else
330 align = MIN (align, inner);
331 return MIN (align, max_align);
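/* Worked example (illustrative only, not used by the compiler): for a
   COMPONENT_REF s.c, where `s' is a struct with DECL_ALIGN == 32 and `c'
   is a char member at byte offset 1, get_inner_reference reports
   bitpos == 8, so INNER drops to 8; `s' itself is a DECL, so the result
   is MIN (32, 8) == 8 bits, i.e. only byte alignment is known for the
   member access.  */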
334 /* Return the alignment in bits of EXP, a pointer valued expression.
335 But don't return more than MAX_ALIGN no matter what.
336 The alignment returned is, by default, the alignment of the thing that
337 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
339 Otherwise, look at the expression to see if we can do better, i.e., if the
340 expression is actually pointing at an object whose alignment is tighter. */
342 unsigned int
343 get_pointer_alignment (tree exp, unsigned int max_align)
345 unsigned int align, inner;
347 /* We rely on TER to compute accurate alignment information. */
348 if (!(optimize && flag_tree_ter))
349 return 0;
351 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
352 return 0;
354 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
355 align = MIN (align, max_align);
357 while (1)
359 switch (TREE_CODE (exp))
361 CASE_CONVERT:
362 exp = TREE_OPERAND (exp, 0);
363 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
364 return align;
366 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
367 align = MIN (inner, max_align);
368 break;
370 case POINTER_PLUS_EXPR:
371 /* If sum of pointer + int, restrict our maximum alignment to that
372 imposed by the integer. If not, we can't do any better than
373 ALIGN. */
374 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
375 return align;
377 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
378 & (max_align / BITS_PER_UNIT - 1))
379 != 0)
380 max_align >>= 1;
382 exp = TREE_OPERAND (exp, 0);
383 break;
385 case ADDR_EXPR:
386 /* See what we are pointing at and look at its alignment. */
387 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
389 default:
390 return align;
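/* Worked example (illustrative only, not used by the compiler): for the
   argument `&i' of a string builtin, where `int i' has DECL_ALIGN == 32,
   the ADDR_EXPR case hands the object to get_object_alignment and 32 bits
   are returned; for a plain `char *p' nothing better than
   TYPE_ALIGN (char) == 8 bits is known.  Without optimization, or with
   TER disabled, 0 is returned unconditionally, as the check above shows.  */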
395 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
396 way, because it could contain a zero byte in the middle.
397 TREE_STRING_LENGTH is the size of the character array, not the string.
399 ONLY_VALUE should be nonzero if the result is not going to be emitted
400 into the instruction stream and zero if it is going to be expanded.
401 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
402 is returned, otherwise NULL, since
403 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
404 evaluate the side-effects.
406 The value returned is of type `ssizetype'.
408 Unfortunately, string_constant can't access the values of const char
409 arrays with initializers, so neither can we do so here. */
411 tree
412 c_strlen (tree src, int only_value)
414 tree offset_node;
415 HOST_WIDE_INT offset;
416 int max;
417 const char *ptr;
419 STRIP_NOPS (src);
420 if (TREE_CODE (src) == COND_EXPR
421 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
423 tree len1, len2;
425 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
426 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
427 if (tree_int_cst_equal (len1, len2))
428 return len1;
431 if (TREE_CODE (src) == COMPOUND_EXPR
432 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
433 return c_strlen (TREE_OPERAND (src, 1), only_value);
435 src = string_constant (src, &offset_node);
436 if (src == 0)
437 return NULL_TREE;
439 max = TREE_STRING_LENGTH (src) - 1;
440 ptr = TREE_STRING_POINTER (src);
442 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
444 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
445 compute the offset to the following null if we don't know where to
446 start searching for it. */
447 int i;
449 for (i = 0; i < max; i++)
450 if (ptr[i] == 0)
451 return NULL_TREE;
453 /* We don't know the starting offset, but we do know that the string
454 has no internal zero bytes. We can assume that the offset falls
455 within the bounds of the string; otherwise, the programmer deserves
456 what he gets. Subtract the offset from the length of the string,
457 and return that. This would perhaps not be valid if we were dealing
458 with named arrays in addition to literal string constants. */
460 return size_diffop (size_int (max), offset_node);
463 /* We have a known offset into the string. Start searching there for
464 a null character if we can represent it as a single HOST_WIDE_INT. */
465 if (offset_node == 0)
466 offset = 0;
467 else if (! host_integerp (offset_node, 0))
468 offset = -1;
469 else
470 offset = tree_low_cst (offset_node, 0);
472 /* If the offset is known to be out of bounds, warn, and call strlen at
473 runtime. */
474 if (offset < 0 || offset > max)
476 /* Suppress multiple warnings for propagated constant strings. */
477 if (! TREE_NO_WARNING (src))
479 warning (0, "offset outside bounds of constant string");
480 TREE_NO_WARNING (src) = 1;
482 return NULL_TREE;
485 /* Use strlen to search for the first zero byte. Since any strings
486 constructed with build_string will have nulls appended, we win even
487 if we get handed something like (char[4])"abcd".
489 Since OFFSET is our starting index into the string, no further
490 calculation is needed. */
491 return ssize_int (strlen (ptr + offset));
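/* Worked examples (illustrative only, not used by the compiler):
     - c_strlen of the STRING_CST "hello" yields ssize_int (5);
     - c_strlen of "foo\0bar" with constant offset 4 yields ssize_int (3);
     - c_strlen of "foo\0bar" with a non-constant offset yields NULL_TREE,
       since the embedded zero byte makes the length depend on the offset;
     - a constant offset outside the string triggers the warning above and
       yields NULL_TREE, so strlen is called at run time instead.  */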
494 /* Return a char pointer for a C string if it is a string constant
495 or sum of string constant and integer constant. */
497 static const char *
498 c_getstr (tree src)
500 tree offset_node;
502 src = string_constant (src, &offset_node);
503 if (src == 0)
504 return 0;
506 if (offset_node == 0)
507 return TREE_STRING_POINTER (src);
508 else if (!host_integerp (offset_node, 1)
509 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
510 return 0;
512 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
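/* Worked example (illustrative only, not used by the compiler): for the
   tree form of "hello" + 2, string_constant returns the STRING_CST and an
   offset of 2, so c_getstr returns a host pointer to "llo"; a variable or
   out-of-range offset makes it return NULL instead.  */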
515 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
516 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
518 static rtx
519 c_readstr (const char *str, enum machine_mode mode)
521 HOST_WIDE_INT c[2];
522 HOST_WIDE_INT ch;
523 unsigned int i, j;
525 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
527 c[0] = 0;
528 c[1] = 0;
529 ch = 1;
530 for (i = 0; i < GET_MODE_SIZE (mode); i++)
532 j = i;
533 if (WORDS_BIG_ENDIAN)
534 j = GET_MODE_SIZE (mode) - i - 1;
535 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
536 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
537 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
538 j *= BITS_PER_UNIT;
539 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
541 if (ch)
542 ch = (unsigned char) str[i];
543 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
545 return immed_double_const (c[0], c[1], mode);
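/* Worked example (illustrative only, not used by the compiler): reading
   "ab" in SImode packs 'a' (0x61), 'b' (0x62) and the trailing NUL; once a
   zero byte is seen CH stays zero, so no bytes past the terminator are
   fetched from STR.  The result is CONST_INT 0x6261 on a little-endian
   target and 0x61620000 on a big-endian one.  */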
548 /* Cast a target constant CST to target CHAR and if that value fits into
549 host char type, return zero and put that value into variable pointed to by
550 P. */
552 static int
553 target_char_cast (tree cst, char *p)
555 unsigned HOST_WIDE_INT val, hostval;
557 if (!host_integerp (cst, 1)
558 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
559 return 1;
561 val = tree_low_cst (cst, 1);
562 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
563 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
565 hostval = val;
566 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
567 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
569 if (val != hostval)
570 return 1;
572 *p = hostval;
573 return 0;
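/* Worked example (illustrative only, not used by the compiler): for the
   INTEGER_CST 'A' (0x41) the value survives both casts, so 0x41 is stored
   through P and 0 is returned.  1 is returned when the constant cannot be
   read as a host integer, or when the target character value does not fit
   in a host char (only possible if the target char is wider than the
   host's), so callers give up on expanding the builtin inline.  */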
576 /* Similar to save_expr, but assumes that arbitrary code is not executed
577 in between the multiple evaluations. In particular, we assume that a
578 non-addressable local variable will not be modified. */
580 static tree
581 builtin_save_expr (tree exp)
583 if (TREE_ADDRESSABLE (exp) == 0
584 && (TREE_CODE (exp) == PARM_DECL
585 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
586 return exp;
588 return save_expr (exp);
591 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
592 times to get the address of either a higher stack frame, or a return
593 address located within it (depending on FNDECL_CODE). */
595 static rtx
596 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
598 int i;
600 #ifdef INITIAL_FRAME_ADDRESS_RTX
601 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
602 #else
603 rtx tem;
605 /* For a zero count with __builtin_return_address, we don't care what
606 frame address we return, because target-specific definitions will
607 override us. Therefore frame pointer elimination is OK, and using
608 the soft frame pointer is OK.
610 For a nonzero count, or a zero count with __builtin_frame_address,
611 we require a stable offset from the current frame pointer to the
612 previous one, so we must use the hard frame pointer, and
613 we must disable frame pointer elimination. */
614 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
615 tem = frame_pointer_rtx;
616 else
618 tem = hard_frame_pointer_rtx;
620 /* Tell reload not to eliminate the frame pointer. */
621 crtl->accesses_prior_frames = 1;
623 #endif
625 /* Some machines need special handling before we can access
626 arbitrary frames. For example, on the SPARC, we must first flush
627 all register windows to the stack. */
628 #ifdef SETUP_FRAME_ADDRESSES
629 if (count > 0)
630 SETUP_FRAME_ADDRESSES ();
631 #endif
633 /* On the SPARC, the return address is not in the frame, it is in a
634 register. There is no way to access it off of the current frame
635 pointer, but it can be accessed off the previous frame pointer by
636 reading the value from the register window save area. */
637 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
638 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
639 count--;
640 #endif
642 /* Scan back COUNT frames to the specified frame. */
643 for (i = 0; i < count; i++)
645 /* Assume the dynamic chain pointer is in the word that the
646 frame address points to, unless otherwise specified. */
647 #ifdef DYNAMIC_CHAIN_ADDRESS
648 tem = DYNAMIC_CHAIN_ADDRESS (tem);
649 #endif
650 tem = memory_address (Pmode, tem);
651 tem = gen_frame_mem (Pmode, tem);
652 tem = copy_to_reg (tem);
655 /* For __builtin_frame_address, return what we've got. But, on
656 the SPARC for example, we may have to add a bias. */
657 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
658 #ifdef FRAME_ADDR_RTX
659 return FRAME_ADDR_RTX (tem);
660 #else
661 return tem;
662 #endif
664 /* For __builtin_return_address, get the return address from that frame. */
665 #ifdef RETURN_ADDR_RTX
666 tem = RETURN_ADDR_RTX (count, tem);
667 #else
668 tem = memory_address (Pmode,
669 plus_constant (tem, GET_MODE_SIZE (Pmode)));
670 tem = gen_frame_mem (Pmode, tem);
671 #endif
672 return tem;
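/* Illustrative source-level use of the builtins expanded above (a sketch,
   not part of this file; the example_* names are made up).  The count
   argument must be a constant; nonzero counts walk the dynamic chain in
   the loop above.  */
#if 0
void *
example_addresses (void)
{
  void *ra = __builtin_return_address (0);	/* Caller's return address.  */
  void *fp = __builtin_frame_address (0);	/* This frame's address.  */
  return ra ? ra : fp;
}
#endif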
675 /* Alias set used for setjmp buffer. */
676 static alias_set_type setjmp_alias_set = -1;
678 /* Construct the leading half of a __builtin_setjmp call. Control will
679 return to RECEIVER_LABEL. This is also called directly by the SJLJ
680 exception handling code. */
682 void
683 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
685 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
686 rtx stack_save;
687 rtx mem;
689 if (setjmp_alias_set == -1)
690 setjmp_alias_set = new_alias_set ();
692 buf_addr = convert_memory_address (Pmode, buf_addr);
694 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
696 /* We store the frame pointer and the address of receiver_label in
697 the buffer and use the rest of it for the stack save area, which
698 is machine-dependent. */
700 mem = gen_rtx_MEM (Pmode, buf_addr);
701 set_mem_alias_set (mem, setjmp_alias_set);
702 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
704 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
705 set_mem_alias_set (mem, setjmp_alias_set);
707 emit_move_insn (validize_mem (mem),
708 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
710 stack_save = gen_rtx_MEM (sa_mode,
711 plus_constant (buf_addr,
712 2 * GET_MODE_SIZE (Pmode)));
713 set_mem_alias_set (stack_save, setjmp_alias_set);
714 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
716 /* If there is further processing to do, do it. */
717 #ifdef HAVE_builtin_setjmp_setup
718 if (HAVE_builtin_setjmp_setup)
719 emit_insn (gen_builtin_setjmp_setup (buf_addr));
720 #endif
722 /* Tell optimize_save_area_alloca that extra work is going to
723 need to go on during alloca. */
724 cfun->calls_setjmp = 1;
726 /* We have a nonlocal label. */
727 cfun->has_nonlocal_label = 1;
730 /* Construct the trailing part of a __builtin_setjmp call. This is
731 also called directly by the SJLJ exception handling code. */
733 void
734 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
736 /* Clobber the FP when we get here, so we have to make sure it's
737 marked as used by this function. */
738 emit_use (hard_frame_pointer_rtx);
740 /* Mark the static chain as clobbered here so life information
741 doesn't get messed up for it. */
742 emit_clobber (static_chain_rtx);
744 /* Now put in the code to restore the frame pointer, and argument
745 pointer, if needed. */
746 #ifdef HAVE_nonlocal_goto
747 if (! HAVE_nonlocal_goto)
748 #endif
750 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
751 /* This might change the hard frame pointer in ways that aren't
752 apparent to early optimization passes, so force a clobber. */
753 emit_clobber (hard_frame_pointer_rtx);
756 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
757 if (fixed_regs[ARG_POINTER_REGNUM])
759 #ifdef ELIMINABLE_REGS
760 size_t i;
761 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
763 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
764 if (elim_regs[i].from == ARG_POINTER_REGNUM
765 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
766 break;
768 if (i == ARRAY_SIZE (elim_regs))
769 #endif
771 /* Now restore our arg pointer from the address at which it
772 was saved in our stack frame. */
773 emit_move_insn (crtl->args.internal_arg_pointer,
774 copy_to_reg (get_arg_pointer_save_area ()));
777 #endif
779 #ifdef HAVE_builtin_setjmp_receiver
780 if (HAVE_builtin_setjmp_receiver)
781 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
782 else
783 #endif
784 #ifdef HAVE_nonlocal_goto_receiver
785 if (HAVE_nonlocal_goto_receiver)
786 emit_insn (gen_nonlocal_goto_receiver ());
787 else
788 #endif
789 { /* Nothing */ }
791 /* We must not allow the code we just generated to be reordered by
792 scheduling. Specifically, the update of the frame pointer must
793 happen immediately, not later. */
794 emit_insn (gen_blockage ());
797 /* __builtin_longjmp is passed a pointer to an array of five words (not
798 all will be used on all machines). It operates similarly to the C
799 library function of the same name, but is more efficient. Much of
800 the code below is copied from the handling of non-local gotos. */
802 static void
803 expand_builtin_longjmp (rtx buf_addr, rtx value)
805 rtx fp, lab, stack, insn, last;
806 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
808 /* DRAP is needed for stack realignment if longjmp is expanded in the
809 current function. */
810 if (SUPPORTS_STACK_ALIGNMENT)
811 crtl->need_drap = true;
813 if (setjmp_alias_set == -1)
814 setjmp_alias_set = new_alias_set ();
816 buf_addr = convert_memory_address (Pmode, buf_addr);
818 buf_addr = force_reg (Pmode, buf_addr);
820 /* We used to store value in static_chain_rtx, but that fails if pointers
821 are smaller than integers. We instead require that the user must pass
822 a second argument of 1, because that is what builtin_setjmp will
823 return. This also makes EH slightly more efficient, since we are no
824 longer copying around a value that we don't care about. */
825 gcc_assert (value == const1_rtx);
827 last = get_last_insn ();
828 #ifdef HAVE_builtin_longjmp
829 if (HAVE_builtin_longjmp)
830 emit_insn (gen_builtin_longjmp (buf_addr));
831 else
832 #endif
834 fp = gen_rtx_MEM (Pmode, buf_addr);
835 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
836 GET_MODE_SIZE (Pmode)));
838 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
839 2 * GET_MODE_SIZE (Pmode)));
840 set_mem_alias_set (fp, setjmp_alias_set);
841 set_mem_alias_set (lab, setjmp_alias_set);
842 set_mem_alias_set (stack, setjmp_alias_set);
844 /* Pick up FP, label, and SP from the block and jump. This code is
845 from expand_goto in stmt.c; see there for detailed comments. */
846 #ifdef HAVE_nonlocal_goto
847 if (HAVE_nonlocal_goto)
848 /* We have to pass a value to the nonlocal_goto pattern that will
849 get copied into the static_chain pointer, but it does not matter
850 what that value is, because builtin_setjmp does not use it. */
851 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
852 else
853 #endif
855 lab = copy_to_reg (lab);
857 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
858 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
860 emit_move_insn (hard_frame_pointer_rtx, fp);
861 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
863 emit_use (hard_frame_pointer_rtx);
864 emit_use (stack_pointer_rtx);
865 emit_indirect_jump (lab);
869 /* Search backwards and mark the jump insn as a non-local goto.
870 Note that this precludes the use of __builtin_longjmp to a
871 __builtin_setjmp target in the same function. However, we've
872 already cautioned the user that these functions are for
873 internal exception handling use only. */
874 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
876 gcc_assert (insn != last);
878 if (JUMP_P (insn))
880 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
881 break;
883 else if (CALL_P (insn))
884 break;
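/* Illustrative source-level use of __builtin_setjmp/__builtin_longjmp (a
   sketch, not part of this file; the example_* names are made up).  The
   buffer is five words, as described above, and the second argument of
   __builtin_longjmp must be 1, which is what the matching __builtin_setjmp
   returns on its second return (see the gcc_assert on VALUE above).  These
   builtins are meant for the compiler's own SJLJ exception handling rather
   than ordinary user code.  */
#if 0
static void *example_buf[5];

static void
example_raise (void)
{
  __builtin_longjmp (example_buf, 1);
}

static int
example_catch (void)
{
  if (__builtin_setjmp (example_buf) == 0)
    {
      example_raise ();
      return 0;		/* Not reached.  */
    }
  return 1;		/* Reached via the longjmp.  */
}
#endif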
888 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
889 and the address of the save area. */
891 static rtx
892 expand_builtin_nonlocal_goto (tree exp)
894 tree t_label, t_save_area;
895 rtx r_label, r_save_area, r_fp, r_sp, insn;
897 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
898 return NULL_RTX;
900 t_label = CALL_EXPR_ARG (exp, 0);
901 t_save_area = CALL_EXPR_ARG (exp, 1);
903 r_label = expand_normal (t_label);
904 r_label = convert_memory_address (Pmode, r_label);
905 r_save_area = expand_normal (t_save_area);
906 r_save_area = convert_memory_address (Pmode, r_save_area);
907 /* Copy the address of the save location to a register just in case it was based
908 on the frame pointer. */
909 r_save_area = copy_to_reg (r_save_area);
910 r_fp = gen_rtx_MEM (Pmode, r_save_area);
911 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
912 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
914 crtl->has_nonlocal_goto = 1;
916 #ifdef HAVE_nonlocal_goto
917 /* ??? We no longer need to pass the static chain value, afaik. */
918 if (HAVE_nonlocal_goto)
919 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
920 else
921 #endif
923 r_label = copy_to_reg (r_label);
925 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
926 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
928 /* Restore frame pointer for containing function.
929 This sets the actual hard register used for the frame pointer
930 to the location of the function's incoming static chain info.
931 The non-local goto handler will then adjust it to contain the
932 proper value and reload the argument pointer, if needed. */
933 emit_move_insn (hard_frame_pointer_rtx, r_fp);
934 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
936 /* USE of hard_frame_pointer_rtx added for consistency;
937 not clear if really needed. */
938 emit_use (hard_frame_pointer_rtx);
939 emit_use (stack_pointer_rtx);
941 /* If the architecture is using a GP register, we must
942 conservatively assume that the target function makes use of it.
943 The prologue of functions with nonlocal gotos must therefore
944 initialize the GP register to the appropriate value, and we
945 must then make sure that this value is live at the point
946 of the jump. (Note that this doesn't necessarily apply
947 to targets with a nonlocal_goto pattern; they are free
948 to implement it in their own way. Note also that this is
949 a no-op if the GP register is a global invariant.) */
950 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
951 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
952 emit_use (pic_offset_table_rtx);
954 emit_indirect_jump (r_label);
957 /* Search backwards to the jump insn and mark it as a
958 non-local goto. */
959 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
961 if (JUMP_P (insn))
963 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
964 break;
966 else if (CALL_P (insn))
967 break;
970 return const0_rtx;
973 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
974 (not all will be used on all machines) that was passed to __builtin_setjmp.
975 It updates the stack pointer in that block to correspond to the current
976 stack pointer. */
978 static void
979 expand_builtin_update_setjmp_buf (rtx buf_addr)
981 enum machine_mode sa_mode = Pmode;
982 rtx stack_save;
985 #ifdef HAVE_save_stack_nonlocal
986 if (HAVE_save_stack_nonlocal)
987 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
988 #endif
989 #ifdef STACK_SAVEAREA_MODE
990 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
991 #endif
993 stack_save
994 = gen_rtx_MEM (sa_mode,
995 memory_address
996 (sa_mode,
997 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
999 #ifdef HAVE_setjmp
1000 if (HAVE_setjmp)
1001 emit_insn (gen_setjmp ());
1002 #endif
1004 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1007 /* Expand a call to __builtin_prefetch. For a target that does not support
1008 data prefetch, evaluate the memory address argument in case it has side
1009 effects. */
1011 static void
1012 expand_builtin_prefetch (tree exp)
1014 tree arg0, arg1, arg2;
1015 int nargs;
1016 rtx op0, op1, op2;
1018 if (!validate_arglist (exp, POINTER_TYPE, 0))
1019 return;
1021 arg0 = CALL_EXPR_ARG (exp, 0);
1023 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1024 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1025 locality). */
1026 nargs = call_expr_nargs (exp);
1027 if (nargs > 1)
1028 arg1 = CALL_EXPR_ARG (exp, 1);
1029 else
1030 arg1 = integer_zero_node;
1031 if (nargs > 2)
1032 arg2 = CALL_EXPR_ARG (exp, 2);
1033 else
1034 arg2 = build_int_cst (NULL_TREE, 3);
1036 /* Argument 0 is an address. */
1037 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1039 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1040 if (TREE_CODE (arg1) != INTEGER_CST)
1042 error ("second argument to %<__builtin_prefetch%> must be a constant");
1043 arg1 = integer_zero_node;
1045 op1 = expand_normal (arg1);
1046 /* Argument 1 must be either zero or one. */
1047 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1049 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1050 " using zero");
1051 op1 = const0_rtx;
1054 /* Argument 2 (locality) must be a compile-time constant int. */
1055 if (TREE_CODE (arg2) != INTEGER_CST)
1057 error ("third argument to %<__builtin_prefetch%> must be a constant");
1058 arg2 = integer_zero_node;
1060 op2 = expand_normal (arg2);
1061 /* Argument 2 must be 0, 1, 2, or 3. */
1062 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1064 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1065 op2 = const0_rtx;
1068 #ifdef HAVE_prefetch
1069 if (HAVE_prefetch)
1071 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1072 (op0,
1073 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1074 || (GET_MODE (op0) != Pmode))
1076 op0 = convert_memory_address (Pmode, op0);
1077 op0 = force_reg (Pmode, op0);
1079 emit_insn (gen_prefetch (op0, op1, op2));
1081 #endif
1083 /* Don't do anything with direct references to volatile memory, but
1084 generate code to handle other side effects. */
1085 if (!MEM_P (op0) && side_effects_p (op0))
1086 emit_insn (op0);
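/* Illustrative source-level use of __builtin_prefetch (a sketch, not part
   of this file; example_prefetch is made up).  The second argument is 0
   for a read or 1 for a write, the third a locality hint from 0 to 3;
   both must be compile-time constants and default as described above.  */
#if 0
static void
example_prefetch (const double *p, double *q)
{
  __builtin_prefetch (p, 0, 3);	/* Read, keep in all cache levels.  */
  __builtin_prefetch (q, 1);	/* Write; locality defaults to 3.  */
  __builtin_prefetch (p + 8);	/* Read and locality both defaulted.  */
}
#endif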
1089 /* Get a MEM rtx for expression EXP which is the address of an operand
1090 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1091 the maximum length of the block of memory that might be accessed or
1092 NULL if unknown. */
1094 static rtx
1095 get_memory_rtx (tree exp, tree len)
1097 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1098 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1100 /* Get an expression we can use to find the attributes to assign to MEM.
1101 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1102 we can. First remove any nops. */
1103 while (CONVERT_EXPR_P (exp)
1104 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1105 exp = TREE_OPERAND (exp, 0);
1107 if (TREE_CODE (exp) == ADDR_EXPR)
1108 exp = TREE_OPERAND (exp, 0);
1109 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1110 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1111 else
1112 exp = NULL;
1114 /* Honor attributes derived from exp, except for the alias set
1115 (as builtin stringops may alias with anything) and the size
1116 (as stringops may access multiple array elements). */
1117 if (exp)
1119 set_mem_attributes (mem, exp, 0);
1121 /* Allow the string and memory builtins to overflow from one
1122 field into another, see http://gcc.gnu.org/PR23561.
1123 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1124 memory accessed by the string or memory builtin will fit
1125 within the field. */
1126 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1128 tree mem_expr = MEM_EXPR (mem);
1129 HOST_WIDE_INT offset = -1, length = -1;
1130 tree inner = exp;
1132 while (TREE_CODE (inner) == ARRAY_REF
1133 || CONVERT_EXPR_P (inner)
1134 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1135 || TREE_CODE (inner) == SAVE_EXPR)
1136 inner = TREE_OPERAND (inner, 0);
1138 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1140 if (MEM_OFFSET (mem)
1141 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1142 offset = INTVAL (MEM_OFFSET (mem));
1144 if (offset >= 0 && len && host_integerp (len, 0))
1145 length = tree_low_cst (len, 0);
1147 while (TREE_CODE (inner) == COMPONENT_REF)
1149 tree field = TREE_OPERAND (inner, 1);
1150 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1151 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1153 /* Bitfields are generally not byte-addressable. */
1154 gcc_assert (!DECL_BIT_FIELD (field)
1155 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1156 % BITS_PER_UNIT) == 0
1157 && host_integerp (DECL_SIZE (field), 0)
1158 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1159 % BITS_PER_UNIT) == 0));
1161 /* If we can prove that the memory starting at XEXP (mem, 0) and
1162 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1163 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1164 fields without DECL_SIZE_UNIT like flexible array members. */
1165 if (length >= 0
1166 && DECL_SIZE_UNIT (field)
1167 && host_integerp (DECL_SIZE_UNIT (field), 0))
1169 HOST_WIDE_INT size
1170 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1171 if (offset <= size
1172 && length <= size
1173 && offset + length <= size)
1174 break;
1177 if (offset >= 0
1178 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1179 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1180 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1181 / BITS_PER_UNIT;
1182 else
1184 offset = -1;
1185 length = -1;
1188 mem_expr = TREE_OPERAND (mem_expr, 0);
1189 inner = TREE_OPERAND (inner, 0);
1192 if (mem_expr == NULL)
1193 offset = -1;
1194 if (mem_expr != MEM_EXPR (mem))
1196 set_mem_expr (mem, mem_expr);
1197 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1200 set_mem_alias_set (mem, 0);
1201 set_mem_size (mem, NULL_RTX);
1204 return mem;
1207 /* Built-in functions to perform an untyped call and return. */
1209 /* For each register that may be used for calling a function, this
1210 gives a mode used to copy the register's value. VOIDmode indicates
1211 the register is not used for calling a function. If the machine
1212 has register windows, this gives only the outbound registers.
1213 INCOMING_REGNO gives the corresponding inbound register. */
1214 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1216 /* For each register that may be used for returning values, this gives
1217 a mode used to copy the register's value. VOIDmode indicates the
1218 register is not used for returning values. If the machine has
1219 register windows, this gives only the outbound registers.
1220 INCOMING_REGNO gives the corresponding inbound register. */
1221 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1223 /* For each register that may be used for calling a function, this
1224 gives the offset of that register into the block returned by
1225 __builtin_apply_args. 0 indicates that the register is not
1226 used for calling a function. */
1227 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1229 /* Return the size required for the block returned by __builtin_apply_args,
1230 and initialize apply_args_mode. */
1232 static int
1233 apply_args_size (void)
1235 static int size = -1;
1236 int align;
1237 unsigned int regno;
1238 enum machine_mode mode;
1240 /* The values computed by this function never change. */
1241 if (size < 0)
1243 /* The first value is the incoming arg-pointer. */
1244 size = GET_MODE_SIZE (Pmode);
1246 /* The second value is the structure value address unless this is
1247 passed as an "invisible" first argument. */
1248 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1249 size += GET_MODE_SIZE (Pmode);
1251 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1252 if (FUNCTION_ARG_REGNO_P (regno))
1254 mode = reg_raw_mode[regno];
1256 gcc_assert (mode != VOIDmode);
1258 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1259 if (size % align != 0)
1260 size = CEIL (size, align) * align;
1261 apply_args_reg_offset[regno] = size;
1262 size += GET_MODE_SIZE (mode);
1263 apply_args_mode[regno] = mode;
1265 else
1267 apply_args_mode[regno] = VOIDmode;
1268 apply_args_reg_offset[regno] = 0;
1271 return size;
1274 /* Return the size required for the block returned by __builtin_apply,
1275 and initialize apply_result_mode. */
1277 static int
1278 apply_result_size (void)
1280 static int size = -1;
1281 int align, regno;
1282 enum machine_mode mode;
1284 /* The values computed by this function never change. */
1285 if (size < 0)
1287 size = 0;
1289 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1290 if (FUNCTION_VALUE_REGNO_P (regno))
1292 mode = reg_raw_mode[regno];
1294 gcc_assert (mode != VOIDmode);
1296 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1297 if (size % align != 0)
1298 size = CEIL (size, align) * align;
1299 size += GET_MODE_SIZE (mode);
1300 apply_result_mode[regno] = mode;
1302 else
1303 apply_result_mode[regno] = VOIDmode;
1305 /* Allow targets that use untyped_call and untyped_return to override
1306 the size so that machine-specific information can be stored here. */
1307 #ifdef APPLY_RESULT_SIZE
1308 size = APPLY_RESULT_SIZE;
1309 #endif
1311 return size;
1314 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1315 /* Create a vector describing the result block RESULT. If SAVEP is true,
1316 the result block is used to save the values; otherwise it is used to
1317 restore the values. */
1319 static rtx
1320 result_vector (int savep, rtx result)
1322 int regno, size, align, nelts;
1323 enum machine_mode mode;
1324 rtx reg, mem;
1325 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1327 size = nelts = 0;
1328 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1329 if ((mode = apply_result_mode[regno]) != VOIDmode)
1331 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1332 if (size % align != 0)
1333 size = CEIL (size, align) * align;
1334 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1335 mem = adjust_address (result, mode, size);
1336 savevec[nelts++] = (savep
1337 ? gen_rtx_SET (VOIDmode, mem, reg)
1338 : gen_rtx_SET (VOIDmode, reg, mem));
1339 size += GET_MODE_SIZE (mode);
1341 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1343 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1345 /* Save the state required to perform an untyped call with the same
1346 arguments as were passed to the current function. */
1348 static rtx
1349 expand_builtin_apply_args_1 (void)
1351 rtx registers, tem;
1352 int size, align, regno;
1353 enum machine_mode mode;
1354 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1356 /* Create a block where the arg-pointer, structure value address,
1357 and argument registers can be saved. */
1358 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1360 /* Walk past the arg-pointer and structure value address. */
1361 size = GET_MODE_SIZE (Pmode);
1362 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1363 size += GET_MODE_SIZE (Pmode);
1365 /* Save each register used in calling a function to the block. */
1366 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1367 if ((mode = apply_args_mode[regno]) != VOIDmode)
1369 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1370 if (size % align != 0)
1371 size = CEIL (size, align) * align;
1373 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1375 emit_move_insn (adjust_address (registers, mode, size), tem);
1376 size += GET_MODE_SIZE (mode);
1379 /* Save the arg pointer to the block. */
1380 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1381 #ifdef STACK_GROWS_DOWNWARD
1382 /* We need the pointer as the caller actually passed it to us, not
1383 as we might have pretended it was passed.  Make sure it's a valid
1384 operand, as emit_move_insn isn't expected to handle a PLUS. */
1385 tem
1386 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1387 NULL_RTX);
1388 #endif
1389 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1391 size = GET_MODE_SIZE (Pmode);
1393 /* Save the structure value address unless this is passed as an
1394 "invisible" first argument. */
1395 if (struct_incoming_value)
1397 emit_move_insn (adjust_address (registers, Pmode, size),
1398 copy_to_reg (struct_incoming_value));
1399 size += GET_MODE_SIZE (Pmode);
1402 /* Return the address of the block. */
1403 return copy_addr_to_reg (XEXP (registers, 0));
1406 /* __builtin_apply_args returns a block of memory allocated on
1407 the stack into which is stored the arg pointer, structure
1408 value address, static chain, and all the registers that might
1409 possibly be used in performing a function call. The code is
1410 moved to the start of the function so the incoming values are
1411 saved. */
1413 static rtx
1414 expand_builtin_apply_args (void)
1416 /* Don't do __builtin_apply_args more than once in a function.
1417 Save the result of the first call and reuse it. */
1418 if (apply_args_value != 0)
1419 return apply_args_value;
1421 /* When this function is called, it means that registers must be
1422 saved on entry to this function. So we migrate the
1423 call to the first insn of this function. */
1424 rtx temp;
1425 rtx seq;
1427 start_sequence ();
1428 temp = expand_builtin_apply_args_1 ();
1429 seq = get_insns ();
1430 end_sequence ();
1432 apply_args_value = temp;
1434 /* Put the insns after the NOTE that starts the function.
1435 If this is inside a start_sequence, make the outer-level insn
1436 chain current, so the code is placed at the start of the
1437 function. If internal_arg_pointer is a non-virtual pseudo,
1438 it needs to be placed after the function that initializes
1439 that pseudo. */
1440 push_topmost_sequence ();
1441 if (REG_P (crtl->args.internal_arg_pointer)
1442 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1443 emit_insn_before (seq, parm_birth_insn);
1444 else
1445 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1446 pop_topmost_sequence ();
1447 return temp;
1451 /* Perform an untyped call and save the state required to perform an
1452 untyped return of whatever value was returned by the given function. */
1454 static rtx
1455 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1457 int size, align, regno;
1458 enum machine_mode mode;
1459 rtx incoming_args, result, reg, dest, src, call_insn;
1460 rtx old_stack_level = 0;
1461 rtx call_fusage = 0;
1462 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1464 arguments = convert_memory_address (Pmode, arguments);
1466 /* Create a block where the return registers can be saved. */
1467 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1469 /* Fetch the arg pointer from the ARGUMENTS block. */
1470 incoming_args = gen_reg_rtx (Pmode);
1471 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1472 #ifndef STACK_GROWS_DOWNWARD
1473 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1474 incoming_args, 0, OPTAB_LIB_WIDEN);
1475 #endif
1477 /* Push a new argument block and copy the arguments. Do not allow
1478 the (potential) memcpy call below to interfere with our stack
1479 manipulations. */
1480 do_pending_stack_adjust ();
1481 NO_DEFER_POP;
1483 /* Save the stack with nonlocal if available. */
1484 #ifdef HAVE_save_stack_nonlocal
1485 if (HAVE_save_stack_nonlocal)
1486 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1487 else
1488 #endif
1489 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1491 /* Allocate a block of memory onto the stack and copy the memory
1492 arguments to the outgoing arguments address. */
1493 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1495 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1496 may have already set current_function_calls_alloca to true.
1497 current_function_calls_alloca won't be set if argsize is zero,
1498 so we have to guarantee need_drap is true here. */
1499 if (SUPPORTS_STACK_ALIGNMENT)
1500 crtl->need_drap = true;
1502 dest = virtual_outgoing_args_rtx;
1503 #ifndef STACK_GROWS_DOWNWARD
1504 if (GET_CODE (argsize) == CONST_INT)
1505 dest = plus_constant (dest, -INTVAL (argsize));
1506 else
1507 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1508 #endif
1509 dest = gen_rtx_MEM (BLKmode, dest);
1510 set_mem_align (dest, PARM_BOUNDARY);
1511 src = gen_rtx_MEM (BLKmode, incoming_args);
1512 set_mem_align (src, PARM_BOUNDARY);
1513 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1515 /* Refer to the argument block. */
1516 apply_args_size ();
1517 arguments = gen_rtx_MEM (BLKmode, arguments);
1518 set_mem_align (arguments, PARM_BOUNDARY);
1520 /* Walk past the arg-pointer and structure value address. */
1521 size = GET_MODE_SIZE (Pmode);
1522 if (struct_value)
1523 size += GET_MODE_SIZE (Pmode);
1525 /* Restore each of the registers previously saved. Make USE insns
1526 for each of these registers for use in making the call. */
1527 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1528 if ((mode = apply_args_mode[regno]) != VOIDmode)
1530 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1531 if (size % align != 0)
1532 size = CEIL (size, align) * align;
1533 reg = gen_rtx_REG (mode, regno);
1534 emit_move_insn (reg, adjust_address (arguments, mode, size));
1535 use_reg (&call_fusage, reg);
1536 size += GET_MODE_SIZE (mode);
1539 /* Restore the structure value address unless this is passed as an
1540 "invisible" first argument. */
1541 size = GET_MODE_SIZE (Pmode);
1542 if (struct_value)
1544 rtx value = gen_reg_rtx (Pmode);
1545 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1546 emit_move_insn (struct_value, value);
1547 if (REG_P (struct_value))
1548 use_reg (&call_fusage, struct_value);
1549 size += GET_MODE_SIZE (Pmode);
1552 /* All arguments and registers used for the call are set up by now! */
1553 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1555 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1556 and we don't want to load it into a register as an optimization,
1557 because prepare_call_address already did it if it should be done. */
1558 if (GET_CODE (function) != SYMBOL_REF)
1559 function = memory_address (FUNCTION_MODE, function);
1561 /* Generate the actual call instruction and save the return value. */
1562 #ifdef HAVE_untyped_call
1563 if (HAVE_untyped_call)
1564 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1565 result, result_vector (1, result)));
1566 else
1567 #endif
1568 #ifdef HAVE_call_value
1569 if (HAVE_call_value)
1571 rtx valreg = 0;
1573 /* Locate the unique return register. It is not possible to
1574 express a call that sets more than one return register using
1575 call_value; use untyped_call for that. In fact, untyped_call
1576 only needs to save the return registers in the given block. */
1577 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1578 if ((mode = apply_result_mode[regno]) != VOIDmode)
1580 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1582 valreg = gen_rtx_REG (mode, regno);
1585 emit_call_insn (GEN_CALL_VALUE (valreg,
1586 gen_rtx_MEM (FUNCTION_MODE, function),
1587 const0_rtx, NULL_RTX, const0_rtx));
1589 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1591 else
1592 #endif
1593 gcc_unreachable ();
1595 /* Find the CALL insn we just emitted, and attach the register usage
1596 information. */
1597 call_insn = last_call_insn ();
1598 add_function_usage_to (call_insn, call_fusage);
1600 /* Restore the stack. */
1601 #ifdef HAVE_save_stack_nonlocal
1602 if (HAVE_save_stack_nonlocal)
1603 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1604 else
1605 #endif
1606 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1608 OK_DEFER_POP;
1610 /* Return the address of the result block. */
1611 result = copy_addr_to_reg (XEXP (result, 0));
1612 return convert_memory_address (ptr_mode, result);
1615 /* Perform an untyped return. */
1617 static void
1618 expand_builtin_return (rtx result)
1620 int size, align, regno;
1621 enum machine_mode mode;
1622 rtx reg;
1623 rtx call_fusage = 0;
1625 result = convert_memory_address (Pmode, result);
1627 apply_result_size ();
1628 result = gen_rtx_MEM (BLKmode, result);
1630 #ifdef HAVE_untyped_return
1631 if (HAVE_untyped_return)
1633 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1634 emit_barrier ();
1635 return;
1637 #endif
1639 /* Restore the return value and note that each value is used. */
1640 size = 0;
1641 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1642 if ((mode = apply_result_mode[regno]) != VOIDmode)
1644 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1645 if (size % align != 0)
1646 size = CEIL (size, align) * align;
1647 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1648 emit_move_insn (reg, adjust_address (result, mode, size));
1650 push_to_sequence (call_fusage);
1651 emit_use (reg);
1652 call_fusage = get_insns ();
1653 end_sequence ();
1654 size += GET_MODE_SIZE (mode);
1657 /* Put the USE insns before the return. */
1658 emit_insn (call_fusage);
1660 /* Return whatever values were restored by jumping directly to the end
1661 of the function. */
1662 expand_naked_return ();
1665 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1667 static enum type_class
1668 type_to_class (tree type)
1670 switch (TREE_CODE (type))
1672 case VOID_TYPE: return void_type_class;
1673 case INTEGER_TYPE: return integer_type_class;
1674 case ENUMERAL_TYPE: return enumeral_type_class;
1675 case BOOLEAN_TYPE: return boolean_type_class;
1676 case POINTER_TYPE: return pointer_type_class;
1677 case REFERENCE_TYPE: return reference_type_class;
1678 case OFFSET_TYPE: return offset_type_class;
1679 case REAL_TYPE: return real_type_class;
1680 case COMPLEX_TYPE: return complex_type_class;
1681 case FUNCTION_TYPE: return function_type_class;
1682 case METHOD_TYPE: return method_type_class;
1683 case RECORD_TYPE: return record_type_class;
1684 case UNION_TYPE:
1685 case QUAL_UNION_TYPE: return union_type_class;
1686 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1687 ? string_type_class : array_type_class);
1688 case LANG_TYPE: return lang_type_class;
1689 default: return no_type_class;
1693 /* Expand a call EXP to __builtin_classify_type. */
1695 static rtx
1696 expand_builtin_classify_type (tree exp)
1698 if (call_expr_nargs (exp))
1699 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1700 return GEN_INT (no_type_class);
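/* For illustration only (a minimal user-level sketch, not used by the
   compiler itself): the classification above is what a program observes
   through __builtin_classify_type, which folds to a constant from the
   type_class enumeration.

     #include <stdio.h>

     int
     main (void)
     {
       int     i = 0;
       double  d = 0.0;
       int    *p = &i;

       // Each call folds at compile time to the class number of its
       // argument's type (integer, real and pointer classes here).
       printf ("%d %d %d\n",
               __builtin_classify_type (i),
               __builtin_classify_type (d),
               __builtin_classify_type (p));
       return 0;
     }
*/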
1703 /* This helper macro, meant to be used in mathfn_built_in below,
1704 determines which among a set of three builtin math functions is
1705 appropriate for a given type mode. The `F' and `L' cases are
1706 automatically generated from the `double' case. */
1707 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1708 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1709 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1710 fcodel = BUILT_IN_MATHFN##L ; break;
1711 /* Similar to above, but appends _R after any F/L suffix. */
1712 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1713 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1714 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1715 fcodel = BUILT_IN_MATHFN##L_R ; break;
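/* For illustration, CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so matching any one of the three suffix variants records the codes of
   all three, and the caller then picks one based on the type.  */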
1717 /* Return the mathematical function equivalent to FN but operating directly
1718 on TYPE, if available. If IMPLICIT is true, find the function in
1719 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1720 can't do the conversion, return zero. */
1722 static tree
1723 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1725 tree const *const fn_arr
1726 = implicit ? implicit_built_in_decls : built_in_decls;
1727 enum built_in_function fcode, fcodef, fcodel;
1729 switch (fn)
1731 CASE_MATHFN (BUILT_IN_ACOS)
1732 CASE_MATHFN (BUILT_IN_ACOSH)
1733 CASE_MATHFN (BUILT_IN_ASIN)
1734 CASE_MATHFN (BUILT_IN_ASINH)
1735 CASE_MATHFN (BUILT_IN_ATAN)
1736 CASE_MATHFN (BUILT_IN_ATAN2)
1737 CASE_MATHFN (BUILT_IN_ATANH)
1738 CASE_MATHFN (BUILT_IN_CBRT)
1739 CASE_MATHFN (BUILT_IN_CEIL)
1740 CASE_MATHFN (BUILT_IN_CEXPI)
1741 CASE_MATHFN (BUILT_IN_COPYSIGN)
1742 CASE_MATHFN (BUILT_IN_COS)
1743 CASE_MATHFN (BUILT_IN_COSH)
1744 CASE_MATHFN (BUILT_IN_DREM)
1745 CASE_MATHFN (BUILT_IN_ERF)
1746 CASE_MATHFN (BUILT_IN_ERFC)
1747 CASE_MATHFN (BUILT_IN_EXP)
1748 CASE_MATHFN (BUILT_IN_EXP10)
1749 CASE_MATHFN (BUILT_IN_EXP2)
1750 CASE_MATHFN (BUILT_IN_EXPM1)
1751 CASE_MATHFN (BUILT_IN_FABS)
1752 CASE_MATHFN (BUILT_IN_FDIM)
1753 CASE_MATHFN (BUILT_IN_FLOOR)
1754 CASE_MATHFN (BUILT_IN_FMA)
1755 CASE_MATHFN (BUILT_IN_FMAX)
1756 CASE_MATHFN (BUILT_IN_FMIN)
1757 CASE_MATHFN (BUILT_IN_FMOD)
1758 CASE_MATHFN (BUILT_IN_FREXP)
1759 CASE_MATHFN (BUILT_IN_GAMMA)
1760 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1761 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1762 CASE_MATHFN (BUILT_IN_HYPOT)
1763 CASE_MATHFN (BUILT_IN_ILOGB)
1764 CASE_MATHFN (BUILT_IN_INF)
1765 CASE_MATHFN (BUILT_IN_ISINF)
1766 CASE_MATHFN (BUILT_IN_J0)
1767 CASE_MATHFN (BUILT_IN_J1)
1768 CASE_MATHFN (BUILT_IN_JN)
1769 CASE_MATHFN (BUILT_IN_LCEIL)
1770 CASE_MATHFN (BUILT_IN_LDEXP)
1771 CASE_MATHFN (BUILT_IN_LFLOOR)
1772 CASE_MATHFN (BUILT_IN_LGAMMA)
1773 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1774 CASE_MATHFN (BUILT_IN_LLCEIL)
1775 CASE_MATHFN (BUILT_IN_LLFLOOR)
1776 CASE_MATHFN (BUILT_IN_LLRINT)
1777 CASE_MATHFN (BUILT_IN_LLROUND)
1778 CASE_MATHFN (BUILT_IN_LOG)
1779 CASE_MATHFN (BUILT_IN_LOG10)
1780 CASE_MATHFN (BUILT_IN_LOG1P)
1781 CASE_MATHFN (BUILT_IN_LOG2)
1782 CASE_MATHFN (BUILT_IN_LOGB)
1783 CASE_MATHFN (BUILT_IN_LRINT)
1784 CASE_MATHFN (BUILT_IN_LROUND)
1785 CASE_MATHFN (BUILT_IN_MODF)
1786 CASE_MATHFN (BUILT_IN_NAN)
1787 CASE_MATHFN (BUILT_IN_NANS)
1788 CASE_MATHFN (BUILT_IN_NEARBYINT)
1789 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1790 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1791 CASE_MATHFN (BUILT_IN_POW)
1792 CASE_MATHFN (BUILT_IN_POWI)
1793 CASE_MATHFN (BUILT_IN_POW10)
1794 CASE_MATHFN (BUILT_IN_REMAINDER)
1795 CASE_MATHFN (BUILT_IN_REMQUO)
1796 CASE_MATHFN (BUILT_IN_RINT)
1797 CASE_MATHFN (BUILT_IN_ROUND)
1798 CASE_MATHFN (BUILT_IN_SCALB)
1799 CASE_MATHFN (BUILT_IN_SCALBLN)
1800 CASE_MATHFN (BUILT_IN_SCALBN)
1801 CASE_MATHFN (BUILT_IN_SIGNBIT)
1802 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1803 CASE_MATHFN (BUILT_IN_SIN)
1804 CASE_MATHFN (BUILT_IN_SINCOS)
1805 CASE_MATHFN (BUILT_IN_SINH)
1806 CASE_MATHFN (BUILT_IN_SQRT)
1807 CASE_MATHFN (BUILT_IN_TAN)
1808 CASE_MATHFN (BUILT_IN_TANH)
1809 CASE_MATHFN (BUILT_IN_TGAMMA)
1810 CASE_MATHFN (BUILT_IN_TRUNC)
1811 CASE_MATHFN (BUILT_IN_Y0)
1812 CASE_MATHFN (BUILT_IN_Y1)
1813 CASE_MATHFN (BUILT_IN_YN)
1815 default:
1816 return NULL_TREE;
1819 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1820 return fn_arr[fcode];
1821 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1822 return fn_arr[fcodef];
1823 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1824 return fn_arr[fcodel];
1825 else
1826 return NULL_TREE;
1829 /* Like mathfn_built_in_1(), but always use the implicit array. */
1831 tree
1832 mathfn_built_in (tree type, enum built_in_function fn)
1834 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
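/* For illustration, a caller wanting the float variant of sin asks for
   the generic code and lets the type select the suffix:

     tree sinf_decl = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   which returns implicit_built_in_decls[BUILT_IN_SINF], or NULL_TREE
   when no such implicit declaration is available.  */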
1837 /* If errno must be maintained, expand the RTL to check if the result,
1838 TARGET, of a built-in function call, EXP, is NaN, and if so set
1839 errno to EDOM. */
1841 static void
1842 expand_errno_check (tree exp, rtx target)
1844 rtx lab = gen_label_rtx ();
1846 /* Test the result; if it is NaN, set errno=EDOM because
1847 the argument was not in the domain. */
1848 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1849 0, lab);
1851 #ifdef TARGET_EDOM
1852 /* If this built-in doesn't throw an exception, set errno directly. */
1853 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1855 #ifdef GEN_ERRNO_RTX
1856 rtx errno_rtx = GEN_ERRNO_RTX;
1857 #else
1858 rtx errno_rtx
1859 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1860 #endif
1861 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1862 emit_label (lab);
1863 return;
1865 #endif
1867 /* Make sure the library call isn't expanded as a tail call. */
1868 CALL_EXPR_TAILCALL (exp) = 0;
1870 /* We can't set errno=EDOM directly; let the library call do it.
1871 Pop the arguments right away in case the call gets deleted. */
1872 NO_DEFER_POP;
1873 expand_call (exp, target, 0);
1874 OK_DEFER_POP;
1875 emit_label (lab);
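/* For illustration, the RTL emitted above corresponds roughly to this
   C-level shape (a sketch; the exact form is target dependent):

     double r = __builtin_sqrt (x);   // inline optab expansion
     if (r != r)                      // only a NaN compares unequal to itself
       errno = EDOM;                  // or, without TARGET_EDOM, re-issue the
                                      // library call so it sets errno itself

   The self-comparison is why the code compares TARGET with itself using
   EQ and jumps past the errno store when the values are equal.  */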
1878 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1879 Return NULL_RTX if a normal call should be emitted rather than expanding
1880 the function in-line. EXP is the expression that is a call to the builtin
1881 function; if convenient, the result should be placed in TARGET.
1882 SUBTARGET may be used as the target for computing one of EXP's operands. */
1884 static rtx
1885 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1887 optab builtin_optab;
1888 rtx op0, insns, before_call;
1889 tree fndecl = get_callee_fndecl (exp);
1890 enum machine_mode mode;
1891 bool errno_set = false;
1892 tree arg;
1894 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1895 return NULL_RTX;
1897 arg = CALL_EXPR_ARG (exp, 0);
1899 switch (DECL_FUNCTION_CODE (fndecl))
1901 CASE_FLT_FN (BUILT_IN_SQRT):
1902 errno_set = ! tree_expr_nonnegative_p (arg);
1903 builtin_optab = sqrt_optab;
1904 break;
1905 CASE_FLT_FN (BUILT_IN_EXP):
1906 errno_set = true; builtin_optab = exp_optab; break;
1907 CASE_FLT_FN (BUILT_IN_EXP10):
1908 CASE_FLT_FN (BUILT_IN_POW10):
1909 errno_set = true; builtin_optab = exp10_optab; break;
1910 CASE_FLT_FN (BUILT_IN_EXP2):
1911 errno_set = true; builtin_optab = exp2_optab; break;
1912 CASE_FLT_FN (BUILT_IN_EXPM1):
1913 errno_set = true; builtin_optab = expm1_optab; break;
1914 CASE_FLT_FN (BUILT_IN_LOGB):
1915 errno_set = true; builtin_optab = logb_optab; break;
1916 CASE_FLT_FN (BUILT_IN_LOG):
1917 errno_set = true; builtin_optab = log_optab; break;
1918 CASE_FLT_FN (BUILT_IN_LOG10):
1919 errno_set = true; builtin_optab = log10_optab; break;
1920 CASE_FLT_FN (BUILT_IN_LOG2):
1921 errno_set = true; builtin_optab = log2_optab; break;
1922 CASE_FLT_FN (BUILT_IN_LOG1P):
1923 errno_set = true; builtin_optab = log1p_optab; break;
1924 CASE_FLT_FN (BUILT_IN_ASIN):
1925 builtin_optab = asin_optab; break;
1926 CASE_FLT_FN (BUILT_IN_ACOS):
1927 builtin_optab = acos_optab; break;
1928 CASE_FLT_FN (BUILT_IN_TAN):
1929 builtin_optab = tan_optab; break;
1930 CASE_FLT_FN (BUILT_IN_ATAN):
1931 builtin_optab = atan_optab; break;
1932 CASE_FLT_FN (BUILT_IN_FLOOR):
1933 builtin_optab = floor_optab; break;
1934 CASE_FLT_FN (BUILT_IN_CEIL):
1935 builtin_optab = ceil_optab; break;
1936 CASE_FLT_FN (BUILT_IN_TRUNC):
1937 builtin_optab = btrunc_optab; break;
1938 CASE_FLT_FN (BUILT_IN_ROUND):
1939 builtin_optab = round_optab; break;
1940 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1941 builtin_optab = nearbyint_optab;
1942 if (flag_trapping_math)
1943 break;
1944 /* Else fallthrough and expand as rint. */
1945 CASE_FLT_FN (BUILT_IN_RINT):
1946 builtin_optab = rint_optab; break;
1947 default:
1948 gcc_unreachable ();
1951 /* Make a suitable register to place result in. */
1952 mode = TYPE_MODE (TREE_TYPE (exp));
1954 if (! flag_errno_math || ! HONOR_NANS (mode))
1955 errno_set = false;
1957 /* Before working hard, check whether the instruction is available. */
1958 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1960 target = gen_reg_rtx (mode);
1962 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1963 need to expand the argument again. This way, we will not perform
1964 side-effects more than once. */
1965 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1967 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1969 start_sequence ();
1971 /* Compute into TARGET.
1972 Set TARGET to wherever the result comes back. */
1973 target = expand_unop (mode, builtin_optab, op0, target, 0);
1975 if (target != 0)
1977 if (errno_set)
1978 expand_errno_check (exp, target);
1980 /* Output the entire sequence. */
1981 insns = get_insns ();
1982 end_sequence ();
1983 emit_insn (insns);
1984 return target;
1987 /* If we were unable to expand via the builtin, stop the sequence
1988 (without outputting the insns) and call the library function
1989 with the stabilized argument list. */
1990 end_sequence ();
1993 before_call = get_last_insn ();
1995 return expand_call (exp, target, target == const0_rtx);
1998 /* Expand a call to the builtin binary math functions (pow and atan2).
1999 Return NULL_RTX if a normal call should be emitted rather than expanding the
2000 function in-line. EXP is the expression that is a call to the builtin
2001 function; if convenient, the result should be placed in TARGET.
2002 SUBTARGET may be used as the target for computing one of EXP's
2003 operands. */
2005 static rtx
2006 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2008 optab builtin_optab;
2009 rtx op0, op1, insns;
2010 int op1_type = REAL_TYPE;
2011 tree fndecl = get_callee_fndecl (exp);
2012 tree arg0, arg1;
2013 enum machine_mode mode;
2014 bool errno_set = true;
2016 switch (DECL_FUNCTION_CODE (fndecl))
2018 CASE_FLT_FN (BUILT_IN_SCALBN):
2019 CASE_FLT_FN (BUILT_IN_SCALBLN):
2020 CASE_FLT_FN (BUILT_IN_LDEXP):
2021 op1_type = INTEGER_TYPE;
2022 default:
2023 break;
2026 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2027 return NULL_RTX;
2029 arg0 = CALL_EXPR_ARG (exp, 0);
2030 arg1 = CALL_EXPR_ARG (exp, 1);
2032 switch (DECL_FUNCTION_CODE (fndecl))
2034 CASE_FLT_FN (BUILT_IN_POW):
2035 builtin_optab = pow_optab; break;
2036 CASE_FLT_FN (BUILT_IN_ATAN2):
2037 builtin_optab = atan2_optab; break;
2038 CASE_FLT_FN (BUILT_IN_SCALB):
2039 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2040 return 0;
2041 builtin_optab = scalb_optab; break;
2042 CASE_FLT_FN (BUILT_IN_SCALBN):
2043 CASE_FLT_FN (BUILT_IN_SCALBLN):
2044 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2045 return 0;
2046 /* Fall through... */
2047 CASE_FLT_FN (BUILT_IN_LDEXP):
2048 builtin_optab = ldexp_optab; break;
2049 CASE_FLT_FN (BUILT_IN_FMOD):
2050 builtin_optab = fmod_optab; break;
2051 CASE_FLT_FN (BUILT_IN_REMAINDER):
2052 CASE_FLT_FN (BUILT_IN_DREM):
2053 builtin_optab = remainder_optab; break;
2054 default:
2055 gcc_unreachable ();
2058 /* Make a suitable register to place result in. */
2059 mode = TYPE_MODE (TREE_TYPE (exp));
2061 /* Before working hard, check whether the instruction is available. */
2062 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2063 return NULL_RTX;
2065 target = gen_reg_rtx (mode);
2067 if (! flag_errno_math || ! HONOR_NANS (mode))
2068 errno_set = false;
2070 /* Always stabilize the argument list. */
2071 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2072 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2074 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2075 op1 = expand_normal (arg1);
2077 start_sequence ();
2079 /* Compute into TARGET.
2080 Set TARGET to wherever the result comes back. */
2081 target = expand_binop (mode, builtin_optab, op0, op1,
2082 target, 0, OPTAB_DIRECT);
2084 /* If we were unable to expand via the builtin, stop the sequence
2085 (without outputting the insns) and call the library function
2086 with the stabilized argument list. */
2087 if (target == 0)
2089 end_sequence ();
2090 return expand_call (exp, target, target == const0_rtx);
2093 if (errno_set)
2094 expand_errno_check (exp, target);
2096 /* Output the entire sequence. */
2097 insns = get_insns ();
2098 end_sequence ();
2099 emit_insn (insns);
2101 return target;
2104 /* Expand a call to the builtin sin and cos math functions.
2105 Return NULL_RTX if a normal call should be emitted rather than expanding the
2106 function in-line. EXP is the expression that is a call to the builtin
2107 function; if convenient, the result should be placed in TARGET.
2108 SUBTARGET may be used as the target for computing one of EXP's
2109 operands. */
2111 static rtx
2112 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2114 optab builtin_optab;
2115 rtx op0, insns;
2116 tree fndecl = get_callee_fndecl (exp);
2117 enum machine_mode mode;
2118 tree arg;
2120 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2121 return NULL_RTX;
2123 arg = CALL_EXPR_ARG (exp, 0);
2125 switch (DECL_FUNCTION_CODE (fndecl))
2127 CASE_FLT_FN (BUILT_IN_SIN):
2128 CASE_FLT_FN (BUILT_IN_COS):
2129 builtin_optab = sincos_optab; break;
2130 default:
2131 gcc_unreachable ();
2134 /* Make a suitable register to place result in. */
2135 mode = TYPE_MODE (TREE_TYPE (exp));
2137 /* Check if the sincos insn is available; otherwise fall back
2138 to the sin or cos insn. */
2139 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2140 switch (DECL_FUNCTION_CODE (fndecl))
2142 CASE_FLT_FN (BUILT_IN_SIN):
2143 builtin_optab = sin_optab; break;
2144 CASE_FLT_FN (BUILT_IN_COS):
2145 builtin_optab = cos_optab; break;
2146 default:
2147 gcc_unreachable ();
2150 /* Before working hard, check whether the instruction is available. */
2151 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2153 target = gen_reg_rtx (mode);
2155 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2156 need to expand the argument again. This way, we will not perform
2157 side-effects more than once. */
2158 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2160 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2162 start_sequence ();
2164 /* Compute into TARGET.
2165 Set TARGET to wherever the result comes back. */
2166 if (builtin_optab == sincos_optab)
2168 int result;
2170 switch (DECL_FUNCTION_CODE (fndecl))
2172 CASE_FLT_FN (BUILT_IN_SIN):
2173 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2174 break;
2175 CASE_FLT_FN (BUILT_IN_COS):
2176 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2177 break;
2178 default:
2179 gcc_unreachable ();
2181 gcc_assert (result);
2183 else
2185 target = expand_unop (mode, builtin_optab, op0, target, 0);
2188 if (target != 0)
2190 /* Output the entire sequence. */
2191 insns = get_insns ();
2192 end_sequence ();
2193 emit_insn (insns);
2194 return target;
2197 /* If we were unable to expand via the builtin, stop the sequence
2198 (without outputting the insns) and call the library function
2199 with the stabilized argument list. */
2200 end_sequence ();
2203 target = expand_call (exp, target, target == const0_rtx);
2205 return target;
2208 /* Expand a call to one of the builtin math functions that operate on
2209 a floating point argument and produce an integer result (ilogb, isinf,
2210 isnan, etc).
2211 Return 0 if a normal call should be emitted rather than expanding the
2212 function in-line. EXP is the expression that is a call to the builtin
2213 function; if convenient, the result should be placed in TARGET.
2214 SUBTARGET may be used as the target for computing one of EXP's operands. */
2216 static rtx
2217 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2219 optab builtin_optab = 0;
2220 enum insn_code icode = CODE_FOR_nothing;
2221 rtx op0;
2222 tree fndecl = get_callee_fndecl (exp);
2223 enum machine_mode mode;
2224 bool errno_set = false;
2225 tree arg;
2227 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2228 return NULL_RTX;
2230 arg = CALL_EXPR_ARG (exp, 0);
2232 switch (DECL_FUNCTION_CODE (fndecl))
2234 CASE_FLT_FN (BUILT_IN_ILOGB):
2235 errno_set = true; builtin_optab = ilogb_optab; break;
2236 CASE_FLT_FN (BUILT_IN_ISINF):
2237 builtin_optab = isinf_optab; break;
2238 case BUILT_IN_ISNORMAL:
2239 case BUILT_IN_ISFINITE:
2240 CASE_FLT_FN (BUILT_IN_FINITE):
2241 /* These builtins have no optabs (yet). */
2242 break;
2243 default:
2244 gcc_unreachable ();
2247 /* There's no easy way to detect the case we need to set EDOM. */
2248 if (flag_errno_math && errno_set)
2249 return NULL_RTX;
2251 /* Optab mode depends on the mode of the input argument. */
2252 mode = TYPE_MODE (TREE_TYPE (arg));
2254 if (builtin_optab)
2255 icode = optab_handler (builtin_optab, mode)->insn_code;
2257 /* Before working hard, check whether the instruction is available. */
2258 if (icode != CODE_FOR_nothing)
2260 /* Make a suitable register to place result in. */
2261 if (!target
2262 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2263 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2265 gcc_assert (insn_data[icode].operand[0].predicate
2266 (target, GET_MODE (target)));
2268 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2269 need to expand the argument again. This way, we will not perform
2270 side-effects more than once. */
2271 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2273 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2275 if (mode != GET_MODE (op0))
2276 op0 = convert_to_mode (mode, op0, 0);
2278 /* Compute into TARGET.
2279 Set TARGET to wherever the result comes back. */
2280 emit_unop_insn (icode, target, op0, UNKNOWN);
2281 return target;
2284 /* If there is no optab, try generic code. */
2285 switch (DECL_FUNCTION_CODE (fndecl))
2287 tree result;
2289 CASE_FLT_FN (BUILT_IN_ISINF):
2291 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2292 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2293 tree const type = TREE_TYPE (arg);
2294 REAL_VALUE_TYPE r;
2295 char buf[128];
2297 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2298 real_from_string (&r, buf);
2299 result = build_call_expr (isgr_fn, 2,
2300 fold_build1 (ABS_EXPR, type, arg),
2301 build_real (type, r));
2302 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2304 CASE_FLT_FN (BUILT_IN_FINITE):
2305 case BUILT_IN_ISFINITE:
2307 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2308 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2309 tree const type = TREE_TYPE (arg);
2310 REAL_VALUE_TYPE r;
2311 char buf[128];
2313 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2314 real_from_string (&r, buf);
2315 result = build_call_expr (isle_fn, 2,
2316 fold_build1 (ABS_EXPR, type, arg),
2317 build_real (type, r));
2318 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2320 case BUILT_IN_ISNORMAL:
2322 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2323 islessequal(fabs(x),DBL_MAX). */
2324 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2325 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2326 tree const type = TREE_TYPE (arg);
2327 REAL_VALUE_TYPE rmax, rmin;
2328 char buf[128];
2330 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2331 real_from_string (&rmax, buf);
2332 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2333 real_from_string (&rmin, buf);
2334 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2335 result = build_call_expr (isle_fn, 2, arg,
2336 build_real (type, rmax));
2337 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2338 build_call_expr (isge_fn, 2, arg,
2339 build_real (type, rmin)));
2340 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2342 default:
2343 break;
2346 target = expand_call (exp, target, target == const0_rtx);
2348 return target;
2351 /* Expand a call to the builtin sincos math function.
2352 Return NULL_RTX if a normal call should be emitted rather than expanding the
2353 function in-line. EXP is the expression that is a call to the builtin
2354 function. */
2356 static rtx
2357 expand_builtin_sincos (tree exp)
2359 rtx op0, op1, op2, target1, target2;
2360 enum machine_mode mode;
2361 tree arg, sinp, cosp;
2362 int result;
2364 if (!validate_arglist (exp, REAL_TYPE,
2365 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2366 return NULL_RTX;
2368 arg = CALL_EXPR_ARG (exp, 0);
2369 sinp = CALL_EXPR_ARG (exp, 1);
2370 cosp = CALL_EXPR_ARG (exp, 2);
2372 /* Make a suitable register to place result in. */
2373 mode = TYPE_MODE (TREE_TYPE (arg));
2375 /* Check if sincos insn is available, otherwise emit the call. */
2376 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2377 return NULL_RTX;
2379 target1 = gen_reg_rtx (mode);
2380 target2 = gen_reg_rtx (mode);
2382 op0 = expand_normal (arg);
2383 op1 = expand_normal (build_fold_indirect_ref (sinp));
2384 op2 = expand_normal (build_fold_indirect_ref (cosp));
2386 /* Compute into target1 and target2.
2387 Set TARGET to wherever the result comes back. */
2388 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2389 gcc_assert (result);
2391 /* Move target1 and target2 to the memory locations indicated
2392 by op1 and op2. */
2393 emit_move_insn (op1, target1);
2394 emit_move_insn (op2, target2);
2396 return const0_rtx;
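/* For illustration, on a target with a sincos insn the expansion above
   makes

     double s, c;
     sincos (x, &s, &c);

   behave roughly like

     s = sin (x);   // both values produced by the one sincos
     c = cos (x);   // instruction, then stored through the pointers

   whereas without the insn NULL_RTX is returned and an ordinary library
   call is emitted instead.  */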
2399 /* Expand a call to the internal cexpi builtin to the sincos math function.
2400 EXP is the expression that is a call to the builtin function; if convenient,
2401 the result should be placed in TARGET. SUBTARGET may be used as the target
2402 for computing one of EXP's operands. */
2404 static rtx
2405 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2407 tree fndecl = get_callee_fndecl (exp);
2408 tree arg, type;
2409 enum machine_mode mode;
2410 rtx op0, op1, op2;
2412 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2413 return NULL_RTX;
2415 arg = CALL_EXPR_ARG (exp, 0);
2416 type = TREE_TYPE (arg);
2417 mode = TYPE_MODE (TREE_TYPE (arg));
2419 /* Try expanding via a sincos optab; fall back to emitting a libcall
2420 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2421 is only generated from sincos or cexp, or when either of them is available. */
2422 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2424 op1 = gen_reg_rtx (mode);
2425 op2 = gen_reg_rtx (mode);
2427 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2429 /* Compute into op1 and op2. */
2430 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2432 else if (TARGET_HAS_SINCOS)
2434 tree call, fn = NULL_TREE;
2435 tree top1, top2;
2436 rtx op1a, op2a;
2438 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2439 fn = built_in_decls[BUILT_IN_SINCOSF];
2440 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2441 fn = built_in_decls[BUILT_IN_SINCOS];
2442 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2443 fn = built_in_decls[BUILT_IN_SINCOSL];
2444 else
2445 gcc_unreachable ();
2447 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2448 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2449 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2450 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2451 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2452 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2454 /* Make sure not to fold the sincos call again. */
2455 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2456 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2457 call, 3, arg, top1, top2));
2459 else
2461 tree call, fn = NULL_TREE, narg;
2462 tree ctype = build_complex_type (type);
2464 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2465 fn = built_in_decls[BUILT_IN_CEXPF];
2466 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2467 fn = built_in_decls[BUILT_IN_CEXP];
2468 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2469 fn = built_in_decls[BUILT_IN_CEXPL];
2470 else
2471 gcc_unreachable ();
2473 /* If we don't have a decl for cexp, create one. This is the
2474 friendliest fallback if the user calls __builtin_cexpi
2475 on a target without full C99 function support. */
2476 if (fn == NULL_TREE)
2478 tree fntype;
2479 const char *name = NULL;
2481 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2482 name = "cexpf";
2483 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2484 name = "cexp";
2485 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2486 name = "cexpl";
2488 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2489 fn = build_fn_decl (name, fntype);
2492 narg = fold_build2 (COMPLEX_EXPR, ctype,
2493 build_real (type, dconst0), arg);
2495 /* Make sure not to fold the cexp call again. */
2496 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2497 return expand_expr (build_call_nary (ctype, call, 1, narg),
2498 target, VOIDmode, EXPAND_NORMAL);
2501 /* Now build the proper return type. */
2502 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2503 make_tree (TREE_TYPE (arg), op2),
2504 make_tree (TREE_TYPE (arg), op1)),
2505 target, VOIDmode, EXPAND_NORMAL);
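/* For illustration, __builtin_cexpi (x), i.e. cos (x) + i*sin (x), is
   lowered above in one of three ways, sketched at the C level:

     // 1. sincos insn available: compute both parts in one instruction.
     // 2. sincos in the runtime:  double s, c; sincos (x, &s, &c);
     // 3. otherwise:              r = cexp (I*x);   (a cexp decl is built
     //                            by name if the target lacks the builtin)

   and the real and imaginary parts are then reassembled with a
   COMPLEX_EXPR.  */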
2508 /* Expand a call to one of the builtin rounding functions gcc defines
2509 as an extension (lfloor and lceil). As these are gcc extensions we
2510 do not need to worry about setting errno to EDOM.
2511 If expanding via optab fails, lower expression to (int)(floor(x)).
2512 EXP is the expression that is a call to the builtin function;
2513 if convenient, the result should be placed in TARGET. */
2515 static rtx
2516 expand_builtin_int_roundingfn (tree exp, rtx target)
2518 convert_optab builtin_optab;
2519 rtx op0, insns, tmp;
2520 tree fndecl = get_callee_fndecl (exp);
2521 enum built_in_function fallback_fn;
2522 tree fallback_fndecl;
2523 enum machine_mode mode;
2524 tree arg;
2526 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2527 gcc_unreachable ();
2529 arg = CALL_EXPR_ARG (exp, 0);
2531 switch (DECL_FUNCTION_CODE (fndecl))
2533 CASE_FLT_FN (BUILT_IN_LCEIL):
2534 CASE_FLT_FN (BUILT_IN_LLCEIL):
2535 builtin_optab = lceil_optab;
2536 fallback_fn = BUILT_IN_CEIL;
2537 break;
2539 CASE_FLT_FN (BUILT_IN_LFLOOR):
2540 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2541 builtin_optab = lfloor_optab;
2542 fallback_fn = BUILT_IN_FLOOR;
2543 break;
2545 default:
2546 gcc_unreachable ();
2549 /* Make a suitable register to place result in. */
2550 mode = TYPE_MODE (TREE_TYPE (exp));
2552 target = gen_reg_rtx (mode);
2554 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2555 need to expand the argument again. This way, we will not perform
2556 side-effects more than once. */
2557 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2559 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2561 start_sequence ();
2563 /* Compute into TARGET. */
2564 if (expand_sfix_optab (target, op0, builtin_optab))
2566 /* Output the entire sequence. */
2567 insns = get_insns ();
2568 end_sequence ();
2569 emit_insn (insns);
2570 return target;
2573 /* If we were unable to expand via the builtin, stop the sequence
2574 (without outputting the insns). */
2575 end_sequence ();
2577 /* Fall back to floating point rounding optab. */
2578 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2580 /* For non-C99 targets we may end up without a fallback fndecl here
2581 if the user called __builtin_lfloor directly. In this case emit
2582 a call to the floor/ceil variants nevertheless. This should result
2583 in the best user experience for targets lacking full C99 support. */
2584 if (fallback_fndecl == NULL_TREE)
2586 tree fntype;
2587 const char *name = NULL;
2589 switch (DECL_FUNCTION_CODE (fndecl))
2591 case BUILT_IN_LCEIL:
2592 case BUILT_IN_LLCEIL:
2593 name = "ceil";
2594 break;
2595 case BUILT_IN_LCEILF:
2596 case BUILT_IN_LLCEILF:
2597 name = "ceilf";
2598 break;
2599 case BUILT_IN_LCEILL:
2600 case BUILT_IN_LLCEILL:
2601 name = "ceill";
2602 break;
2603 case BUILT_IN_LFLOOR:
2604 case BUILT_IN_LLFLOOR:
2605 name = "floor";
2606 break;
2607 case BUILT_IN_LFLOORF:
2608 case BUILT_IN_LLFLOORF:
2609 name = "floorf";
2610 break;
2611 case BUILT_IN_LFLOORL:
2612 case BUILT_IN_LLFLOORL:
2613 name = "floorl";
2614 break;
2615 default:
2616 gcc_unreachable ();
2619 fntype = build_function_type_list (TREE_TYPE (arg),
2620 TREE_TYPE (arg), NULL_TREE);
2621 fallback_fndecl = build_fn_decl (name, fntype);
2624 exp = build_call_expr (fallback_fndecl, 1, arg);
2626 tmp = expand_normal (exp);
2628 /* Truncate the result of the floating point optab to an integer
2629 via expand_fix (). */
2630 target = gen_reg_rtx (mode);
2631 expand_fix (target, tmp, 0);
2633 return target;
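/* For illustration, when the lceil/lfloor optab is unavailable the code
   above lowers

     long l = __builtin_lfloor (x);

   to roughly

     double t = floor (x);   // fallback call; "floor" is built by name
                             // if the target lacks the C99 builtin decl
     long   l = (long) t;    // expand_fix truncates the rounded value
*/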
2636 /* Expand a call to one of the builtin math functions doing integer
2637 conversion (lrint).
2638 Return 0 if a normal call should be emitted rather than expanding the
2639 function in-line. EXP is the expression that is a call to the builtin
2640 function; if convenient, the result should be placed in TARGET. */
2642 static rtx
2643 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2645 convert_optab builtin_optab;
2646 rtx op0, insns;
2647 tree fndecl = get_callee_fndecl (exp);
2648 tree arg;
2649 enum machine_mode mode;
2651 /* There's no easy way to detect the case we need to set EDOM. */
2652 if (flag_errno_math)
2653 return NULL_RTX;
2655 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2656 gcc_unreachable ();
2658 arg = CALL_EXPR_ARG (exp, 0);
2660 switch (DECL_FUNCTION_CODE (fndecl))
2662 CASE_FLT_FN (BUILT_IN_LRINT):
2663 CASE_FLT_FN (BUILT_IN_LLRINT):
2664 builtin_optab = lrint_optab; break;
2665 CASE_FLT_FN (BUILT_IN_LROUND):
2666 CASE_FLT_FN (BUILT_IN_LLROUND):
2667 builtin_optab = lround_optab; break;
2668 default:
2669 gcc_unreachable ();
2672 /* Make a suitable register to place result in. */
2673 mode = TYPE_MODE (TREE_TYPE (exp));
2675 target = gen_reg_rtx (mode);
2677 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2678 need to expand the argument again. This way, we will not perform
2679 side-effects more than once. */
2680 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2682 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2684 start_sequence ();
2686 if (expand_sfix_optab (target, op0, builtin_optab))
2688 /* Output the entire sequence. */
2689 insns = get_insns ();
2690 end_sequence ();
2691 emit_insn (insns);
2692 return target;
2695 /* If we were unable to expand via the builtin, stop the sequence
2696 (without outputting the insns) and call the library function
2697 with the stabilized argument list. */
2698 end_sequence ();
2700 target = expand_call (exp, target, target == const0_rtx);
2702 return target;
2705 /* To evaluate powi(x,n), the floating point value x raised to the
2706 constant integer exponent n, we use a hybrid algorithm that
2707 combines the "window method" with look-up tables. For an
2708 introduction to exponentiation algorithms and "addition chains",
2709 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2710 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2711 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2712 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2714 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2715 multiplications to inline before calling the system library's pow
2716 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2717 so this default never requires calling pow, powf or powl. */
2719 #ifndef POWI_MAX_MULTS
2720 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2721 #endif
2723 /* The size of the "optimal power tree" lookup table. All
2724 exponents less than this value are simply looked up in the
2725 powi_table below. This threshold is also used to size the
2726 cache of pseudo registers that hold intermediate results. */
2727 #define POWI_TABLE_SIZE 256
2729 /* The size, in bits of the window, used in the "window method"
2730 exponentiation algorithm. This is equivalent to a radix of
2731 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2732 #define POWI_WINDOW_SIZE 3
2734 /* The following table is an efficient representation of an
2735 "optimal power tree". For each value, i, the corresponding
2736 value, j, in the table states that an optimal evaluation
2737 sequence for calculating pow(x,i) can be found by evaluating
2738 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2739 100 integers is given in Knuth's "Seminumerical algorithms". */
2741 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2743 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2744 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2745 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2746 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2747 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2748 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2749 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2750 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2751 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2752 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2753 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2754 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2755 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2756 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2757 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2758 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2759 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2760 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2761 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2762 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2763 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2764 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2765 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2766 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2767 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2768 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2769 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2770 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2771 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2772 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2773 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2774 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
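/* For illustration, the table drives the decomposition bottom-up.  With
   powi_table[13] == 10 and powi_table[5] == 3, pow (x, 13) becomes

     t2  = x  * x;      // x**2
     t3  = t2 * x;      // x**3
     t5  = t3 * t2;     // x**5
     t10 = t5 * t5;     // x**10
     t13 = t10 * t3;    // x**13

   five multiplications in total, which is the count powi_cost reports
   for an exponent of 13.  */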
2778 /* Return the number of multiplications required to calculate
2779 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2780 subroutine of powi_cost. CACHE is an array indicating
2781 which exponents have already been calculated. */
2783 static int
2784 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2786 /* If we've already calculated this exponent, then this evaluation
2787 doesn't require any additional multiplications. */
2788 if (cache[n])
2789 return 0;
2791 cache[n] = true;
2792 return powi_lookup_cost (n - powi_table[n], cache)
2793 + powi_lookup_cost (powi_table[n], cache) + 1;
2796 /* Return the number of multiplications required to calculate
2797 powi(x,n) for an arbitrary x, given the exponent N. This
2798 function needs to be kept in sync with expand_powi below. */
2800 static int
2801 powi_cost (HOST_WIDE_INT n)
2803 bool cache[POWI_TABLE_SIZE];
2804 unsigned HOST_WIDE_INT digit;
2805 unsigned HOST_WIDE_INT val;
2806 int result;
2808 if (n == 0)
2809 return 0;
2811 /* Ignore the reciprocal when calculating the cost. */
2812 val = (n < 0) ? -n : n;
2814 /* Initialize the exponent cache. */
2815 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2816 cache[1] = true;
2818 result = 0;
2820 while (val >= POWI_TABLE_SIZE)
2822 if (val & 1)
2824 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2825 result += powi_lookup_cost (digit, cache)
2826 + POWI_WINDOW_SIZE + 1;
2827 val >>= POWI_WINDOW_SIZE;
2829 else
2831 val >>= 1;
2832 result++;
2836 return result + powi_lookup_cost (val, cache);
2839 /* Recursive subroutine of expand_powi. This function takes the array,
2840 CACHE, of already calculated exponents and an exponent N and returns
2841 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2843 static rtx
2844 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2846 unsigned HOST_WIDE_INT digit;
2847 rtx target, result;
2848 rtx op0, op1;
2850 if (n < POWI_TABLE_SIZE)
2852 if (cache[n])
2853 return cache[n];
2855 target = gen_reg_rtx (mode);
2856 cache[n] = target;
2858 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2859 op1 = expand_powi_1 (mode, powi_table[n], cache);
2861 else if (n & 1)
2863 target = gen_reg_rtx (mode);
2864 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2865 op0 = expand_powi_1 (mode, n - digit, cache);
2866 op1 = expand_powi_1 (mode, digit, cache);
2868 else
2870 target = gen_reg_rtx (mode);
2871 op0 = expand_powi_1 (mode, n >> 1, cache);
2872 op1 = op0;
2875 result = expand_mult (mode, op0, op1, target, 0);
2876 if (result != target)
2877 emit_move_insn (target, result);
2878 return target;
2881 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2882 floating point operand in mode MODE, and N is the exponent. This
2883 function needs to be kept in sync with powi_cost above. */
2885 static rtx
2886 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2888 unsigned HOST_WIDE_INT val;
2889 rtx cache[POWI_TABLE_SIZE];
2890 rtx result;
2892 if (n == 0)
2893 return CONST1_RTX (mode);
2895 val = (n < 0) ? -n : n;
2897 memset (cache, 0, sizeof (cache));
2898 cache[1] = x;
2900 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2902 /* If the original exponent was negative, reciprocate the result. */
2903 if (n < 0)
2904 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2905 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2907 return result;
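/* For illustration, a negative exponent reuses the positive expansion
   and finishes with a single reciprocal.  For powi (x, -5):

     t2 = x  * x;       // x**2
     t3 = x  * t2;      // x**3   (powi_table[3] == 2)
     t5 = t2 * t3;      // x**5   (powi_table[5] == 3)
     r  = 1.0 / t5;     // the reciprocal handles the negative sign

   three multiplications plus one division.  */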
2910 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2911 a normal call should be emitted rather than expanding the function
2912 in-line. EXP is the expression that is a call to the builtin
2913 function; if convenient, the result should be placed in TARGET. */
2915 static rtx
2916 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2918 tree arg0, arg1;
2919 tree fn, narg0;
2920 tree type = TREE_TYPE (exp);
2921 REAL_VALUE_TYPE cint, c, c2;
2922 HOST_WIDE_INT n;
2923 rtx op, op2;
2924 enum machine_mode mode = TYPE_MODE (type);
2926 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2927 return NULL_RTX;
2929 arg0 = CALL_EXPR_ARG (exp, 0);
2930 arg1 = CALL_EXPR_ARG (exp, 1);
2932 if (TREE_CODE (arg1) != REAL_CST
2933 || TREE_OVERFLOW (arg1))
2934 return expand_builtin_mathfn_2 (exp, target, subtarget);
2936 /* Handle constant exponents. */
2938 /* For integer valued exponents we can expand to an optimal multiplication
2939 sequence using expand_powi. */
2940 c = TREE_REAL_CST (arg1);
2941 n = real_to_integer (&c);
2942 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2943 if (real_identical (&c, &cint)
2944 && ((n >= -1 && n <= 2)
2945 || (flag_unsafe_math_optimizations
2946 && optimize_insn_for_speed_p ()
2947 && powi_cost (n) <= POWI_MAX_MULTS)))
2949 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2950 if (n != 1)
2952 op = force_reg (mode, op);
2953 op = expand_powi (op, mode, n);
2955 return op;
2958 narg0 = builtin_save_expr (arg0);
2960 /* If the exponent is not integer valued, check if it is half of an integer.
2961 In this case we can expand to sqrt (x) * x**(n/2). */
2962 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2963 if (fn != NULL_TREE)
2965 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2966 n = real_to_integer (&c2);
2967 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2968 if (real_identical (&c2, &cint)
2969 && ((flag_unsafe_math_optimizations
2970 && optimize_insn_for_speed_p ()
2971 && powi_cost (n/2) <= POWI_MAX_MULTS)
2972 || n == 1))
2974 tree call_expr = build_call_expr (fn, 1, narg0);
2975 /* Use expand_expr in case the newly built call expression
2976 was folded to a non-call. */
2977 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2978 if (n != 1)
2980 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2981 op2 = force_reg (mode, op2);
2982 op2 = expand_powi (op2, mode, abs (n / 2));
2983 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2984 0, OPTAB_LIB_WIDEN);
2985 /* If the original exponent was negative, reciprocate the
2986 result. */
2987 if (n < 0)
2988 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2989 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2991 return op;
2995 /* Check whether the exponent is a third of an integer. In this case
2996 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2997 different from pow (x, 1./3.) due to rounding and behavior
2998 with negative x, we need to restrict this transformation to
2999 unsafe math together with either nonnegative x or finite math. */
3000 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3001 if (fn != NULL_TREE
3002 && flag_unsafe_math_optimizations
3003 && (tree_expr_nonnegative_p (arg0)
3004 || !HONOR_NANS (mode)))
3006 REAL_VALUE_TYPE dconst3;
3007 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3008 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3009 real_round (&c2, mode, &c2);
3010 n = real_to_integer (&c2);
3011 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3012 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3013 real_convert (&c2, mode, &c2);
3014 if (real_identical (&c2, &c)
3015 && ((optimize_insn_for_speed_p ()
3016 && powi_cost (n/3) <= POWI_MAX_MULTS)
3017 || n == 1))
3019 tree call_expr = build_call_expr (fn, 1,narg0);
3020 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3021 if (abs (n) % 3 == 2)
3022 op = expand_simple_binop (mode, MULT, op, op, op,
3023 0, OPTAB_LIB_WIDEN);
3024 if (n != 1)
3026 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3027 op2 = force_reg (mode, op2);
3028 op2 = expand_powi (op2, mode, abs (n / 3));
3029 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3030 0, OPTAB_LIB_WIDEN);
3031 /* If the original exponent was negative, reciprocate the
3032 result. */
3033 if (n < 0)
3034 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3035 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3037 return op;
3041 /* Fall back to optab expansion. */
3042 return expand_builtin_mathfn_2 (exp, target, subtarget);
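/* For illustration, with -funsafe-math-optimizations the half-integer
   case above rewrites

     pow (x, 3.5)

   roughly as

     s = sqrt (x);
     t = powi (x, 3);   // x*x then *x, via expand_powi
     r = t * s;         // x**3.5

   and the cube-root case handles exponents that are thirds of an
   integer the same way using cbrt (x).  */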
3045 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3046 a normal call should be emitted rather than expanding the function
3047 in-line. EXP is the expression that is a call to the builtin
3048 function; if convenient, the result should be placed in TARGET. */
3050 static rtx
3051 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3053 tree arg0, arg1;
3054 rtx op0, op1;
3055 enum machine_mode mode;
3056 enum machine_mode mode2;
3058 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3059 return NULL_RTX;
3061 arg0 = CALL_EXPR_ARG (exp, 0);
3062 arg1 = CALL_EXPR_ARG (exp, 1);
3063 mode = TYPE_MODE (TREE_TYPE (exp));
3065 /* Handle constant power. */
3067 if (TREE_CODE (arg1) == INTEGER_CST
3068 && !TREE_OVERFLOW (arg1))
3070 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3072 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3073 Otherwise, check the number of multiplications required. */
3074 if ((TREE_INT_CST_HIGH (arg1) == 0
3075 || TREE_INT_CST_HIGH (arg1) == -1)
3076 && ((n >= -1 && n <= 2)
3077 || (optimize_insn_for_speed_p ()
3078 && powi_cost (n) <= POWI_MAX_MULTS)))
3080 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3081 op0 = force_reg (mode, op0);
3082 return expand_powi (op0, mode, n);
3086 /* Emit a libcall to libgcc. */
3088 /* Mode of the 2nd argument must match that of an int. */
3089 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3091 if (target == NULL_RTX)
3092 target = gen_reg_rtx (mode);
3094 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3095 if (GET_MODE (op0) != mode)
3096 op0 = convert_to_mode (mode, op0, 0);
3097 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3098 if (GET_MODE (op1) != mode2)
3099 op1 = convert_to_mode (mode2, op1, 0);
3101 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3102 target, LCT_CONST, mode, 2,
3103 op0, mode, op1, mode2);
3105 return target;
3108 /* Expand expression EXP which is a call to the strlen builtin. Return
3109 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3110 try to get the result in TARGET, if convenient. */
3112 static rtx
3113 expand_builtin_strlen (tree exp, rtx target,
3114 enum machine_mode target_mode)
3116 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3117 return NULL_RTX;
3118 else
3120 rtx pat;
3121 tree len;
3122 tree src = CALL_EXPR_ARG (exp, 0);
3123 rtx result, src_reg, char_rtx, before_strlen;
3124 enum machine_mode insn_mode = target_mode, char_mode;
3125 enum insn_code icode = CODE_FOR_nothing;
3126 int align;
3128 /* If the length can be computed at compile-time, return it. */
3129 len = c_strlen (src, 0);
3130 if (len)
3131 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3133 /* If the length can be computed at compile-time and is a constant
3134 integer, but there are side-effects in src, evaluate
3135 src for side-effects, then return len.
3136 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3137 can be optimized into: i++; x = 3; */
3138 len = c_strlen (src, 1);
3139 if (len && TREE_CODE (len) == INTEGER_CST)
3141 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3142 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3145 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3147 /* If SRC is not a pointer type, don't do this operation inline. */
3148 if (align == 0)
3149 return NULL_RTX;
3151 /* Bail out if we can't compute strlen in the right mode. */
3152 while (insn_mode != VOIDmode)
3154 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3155 if (icode != CODE_FOR_nothing)
3156 break;
3158 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3160 if (insn_mode == VOIDmode)
3161 return NULL_RTX;
3163 /* Make a place to write the result of the instruction. */
3164 result = target;
3165 if (! (result != 0
3166 && REG_P (result)
3167 && GET_MODE (result) == insn_mode
3168 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3169 result = gen_reg_rtx (insn_mode);
3171 /* Make a place to hold the source address. We will not expand
3172 the actual source until we are sure that the expansion will
3173 not fail -- there are trees that cannot be expanded twice. */
3174 src_reg = gen_reg_rtx (Pmode);
3176 /* Mark the beginning of the strlen sequence so we can emit the
3177 source operand later. */
3178 before_strlen = get_last_insn ();
3180 char_rtx = const0_rtx;
3181 char_mode = insn_data[(int) icode].operand[2].mode;
3182 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3183 char_mode))
3184 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3186 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3187 char_rtx, GEN_INT (align));
3188 if (! pat)
3189 return NULL_RTX;
3190 emit_insn (pat);
3192 /* Now that we are assured of success, expand the source. */
3193 start_sequence ();
3194 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3195 if (pat != src_reg)
3196 emit_move_insn (src_reg, pat);
3197 pat = get_insns ();
3198 end_sequence ();
3200 if (before_strlen)
3201 emit_insn_after (pat, before_strlen);
3202 else
3203 emit_insn_before (pat, get_insns ());
3205 /* Return the value in the proper mode for this function. */
3206 if (GET_MODE (result) == target_mode)
3207 target = result;
3208 else if (target != 0)
3209 convert_move (target, result, 0);
3210 else
3211 target = convert_to_mode (target_mode, result, 0);
3213 return target;
3217 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed; the
3218 caller should emit a normal call, otherwise try to get the result
3219 in TARGET, if convenient (and in mode MODE if that's convenient). */
3221 static rtx
3222 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3224 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3226 tree type = TREE_TYPE (exp);
3227 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3228 CALL_EXPR_ARG (exp, 1), type);
3229 if (result)
3230 return expand_expr (result, target, mode, EXPAND_NORMAL);
3232 return NULL_RTX;
3235 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3236 caller should emit a normal call, otherwise try to get the result
3237 in TARGET, if convenient (and in mode MODE if that's convenient). */
3239 static rtx
3240 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3242 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3244 tree type = TREE_TYPE (exp);
3245 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3246 CALL_EXPR_ARG (exp, 1), type);
3247 if (result)
3248 return expand_expr (result, target, mode, EXPAND_NORMAL);
3250 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3252 return NULL_RTX;
3255 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3256 caller should emit a normal call, otherwise try to get the result
3257 in TARGET, if convenient (and in mode MODE if that's convenient). */
3259 static rtx
3260 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3262 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3264 tree type = TREE_TYPE (exp);
3265 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3266 CALL_EXPR_ARG (exp, 1), type);
3267 if (result)
3268 return expand_expr (result, target, mode, EXPAND_NORMAL);
3270 return NULL_RTX;
3273 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3274 caller should emit a normal call, otherwise try to get the result
3275 in TARGET, if convenient (and in mode MODE if that's convenient). */
3277 static rtx
3278 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3280 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3282 tree type = TREE_TYPE (exp);
3283 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3284 CALL_EXPR_ARG (exp, 1), type);
3285 if (result)
3286 return expand_expr (result, target, mode, EXPAND_NORMAL);
3288 return NULL_RTX;
3291 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3292 bytes from constant string DATA + OFFSET and return it as target
3293 constant. */
3295 static rtx
3296 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3297 enum machine_mode mode)
3299 const char *str = (const char *) data;
3301 gcc_assert (offset >= 0
3302 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3303 <= strlen (str) + 1));
3305 return c_readstr (str + offset, mode);
3308 /* Expand a call EXP to the memcpy builtin.
3309 Return NULL_RTX if we failed, the caller should emit a normal call,
3310 otherwise try to get the result in TARGET, if convenient (and in
3311 mode MODE if that's convenient). */
3313 static rtx
3314 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3316 tree fndecl = get_callee_fndecl (exp);
3318 if (!validate_arglist (exp,
3319 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3320 return NULL_RTX;
3321 else
3323 tree dest = CALL_EXPR_ARG (exp, 0);
3324 tree src = CALL_EXPR_ARG (exp, 1);
3325 tree len = CALL_EXPR_ARG (exp, 2);
3326 const char *src_str;
3327 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3328 unsigned int dest_align
3329 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3330 rtx dest_mem, src_mem, dest_addr, len_rtx;
3331 tree result = fold_builtin_memory_op (dest, src, len,
3332 TREE_TYPE (TREE_TYPE (fndecl)),
3333 false, /*endp=*/0);
3334 HOST_WIDE_INT expected_size = -1;
3335 unsigned int expected_align = 0;
3336 tree_ann_common_t ann;
3338 if (result)
3340 while (TREE_CODE (result) == COMPOUND_EXPR)
3342 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3343 EXPAND_NORMAL);
3344 result = TREE_OPERAND (result, 1);
3346 return expand_expr (result, target, mode, EXPAND_NORMAL);
3349 /* If DEST is not a pointer type, call the normal function. */
3350 if (dest_align == 0)
3351 return NULL_RTX;
3353 /* If either SRC is not a pointer type, don't do this
3354 operation in-line. */
3355 if (src_align == 0)
3356 return NULL_RTX;
3358 ann = tree_common_ann (exp);
3359 if (ann)
3360 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3362 if (expected_align < dest_align)
3363 expected_align = dest_align;
3364 dest_mem = get_memory_rtx (dest, len);
3365 set_mem_align (dest_mem, dest_align);
3366 len_rtx = expand_normal (len);
3367 src_str = c_getstr (src);
3369 /* If SRC is a string constant and block move would be done
3370 by pieces, we can avoid loading the string from memory
3371 and only store the computed constants. */
3372 if (src_str
3373 && GET_CODE (len_rtx) == CONST_INT
3374 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3375 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3376 CONST_CAST (char *, src_str),
3377 dest_align, false))
3379 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3380 builtin_memcpy_read_str,
3381 CONST_CAST (char *, src_str),
3382 dest_align, false, 0);
3383 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3384 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3385 return dest_mem;
3388 src_mem = get_memory_rtx (src, len);
3389 set_mem_align (src_mem, src_align);
3391 /* Copy word part most expediently. */
3392 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3393 CALL_EXPR_TAILCALL (exp)
3394 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3395 expected_align, expected_size);
3397 if (dest_addr == 0)
3399 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3400 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3402 return dest_addr;
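/* For illustration, when SRC is a string literal and LEN is a small
   constant, the store-by-pieces path above may emit the bytes directly
   instead of a block move or a call:

     char buf[4];
     __builtin_memcpy (buf, "hi!", 4);   // can become one 32-bit constant
                                         // store; no call, no load from
                                         // the literal's memory image

   subject to the destination alignment and the target's
   can_store_by_pieces cost check.  */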
3406 /* Expand a call EXP to the mempcpy builtin.
3407 Return NULL_RTX if we failed; the caller should emit a normal call,
3408 otherwise try to get the result in TARGET, if convenient (and in
3409 mode MODE if that's convenient). If ENDP is 0 return the
3410 destination pointer, if ENDP is 1 return the end pointer ala
3411 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3412 stpcpy. */
3414 static rtx
3415 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3417 if (!validate_arglist (exp,
3418 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3419 return NULL_RTX;
3420 else
3422 tree dest = CALL_EXPR_ARG (exp, 0);
3423 tree src = CALL_EXPR_ARG (exp, 1);
3424 tree len = CALL_EXPR_ARG (exp, 2);
3425 return expand_builtin_mempcpy_args (dest, src, len,
3426 TREE_TYPE (exp),
3427 target, mode, /*endp=*/ 1);
3431 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3432 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3433 so that this can also be called without constructing an actual CALL_EXPR.
3434 TYPE is the return type of the call. The other arguments and return value
3435 are the same as for expand_builtin_mempcpy. */
3437 static rtx
3438 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3439 rtx target, enum machine_mode mode, int endp)
3441 /* If return value is ignored, transform mempcpy into memcpy. */
3442 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3444 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3445 tree result = build_call_expr (fn, 3, dest, src, len);
3447 while (TREE_CODE (result) == COMPOUND_EXPR)
3449 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3450 EXPAND_NORMAL);
3451 result = TREE_OPERAND (result, 1);
3453 return expand_expr (result, target, mode, EXPAND_NORMAL);
3455 else
3457 const char *src_str;
3458 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3459 unsigned int dest_align
3460 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3461 rtx dest_mem, src_mem, len_rtx;
3462 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3464 if (result)
3466 while (TREE_CODE (result) == COMPOUND_EXPR)
3468 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3469 EXPAND_NORMAL);
3470 result = TREE_OPERAND (result, 1);
3472 return expand_expr (result, target, mode, EXPAND_NORMAL);
3475 /* If either SRC or DEST is not a pointer type, don't do this
3476 operation in-line. */
3477 if (dest_align == 0 || src_align == 0)
3478 return NULL_RTX;
3480 /* If LEN is not constant, call the normal function. */
3481 if (! host_integerp (len, 1))
3482 return NULL_RTX;
3484 len_rtx = expand_normal (len);
3485 src_str = c_getstr (src);
3487 /* If SRC is a string constant and block move would be done
3488 by pieces, we can avoid loading the string from memory
3489 and only store the computed constants. */
3490 if (src_str
3491 && GET_CODE (len_rtx) == CONST_INT
3492 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3493 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3494 CONST_CAST (char *, src_str),
3495 dest_align, false))
3497 dest_mem = get_memory_rtx (dest, len);
3498 set_mem_align (dest_mem, dest_align);
3499 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3500 builtin_memcpy_read_str,
3501 CONST_CAST (char *, src_str),
3502 dest_align, false, endp);
3503 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3504 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3505 return dest_mem;
3508 if (GET_CODE (len_rtx) == CONST_INT
3509 && can_move_by_pieces (INTVAL (len_rtx),
3510 MIN (dest_align, src_align)))
3512 dest_mem = get_memory_rtx (dest, len);
3513 set_mem_align (dest_mem, dest_align);
3514 src_mem = get_memory_rtx (src, len);
3515 set_mem_align (src_mem, src_align);
3516 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3517 MIN (dest_align, src_align), endp);
3518 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3519 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3520 return dest_mem;
3523 return NULL_RTX;
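/* Editorial illustration (not part of the original file): mempcpy is the
   GNU/POSIX variant of memcpy that returns DEST + LEN, which is why the
   expander above returns the adjusted end pointer (endp == 1) and falls
   back to plain memcpy when the result is ignored.  Hypothetical
   example_* name; guarded out of the build.  */
#if 0
#define _GNU_SOURCE
#include <string.h>

char *
example_mempcpy_concat (char *dst, const char *a, size_t na,
                        const char *b, size_t nb)
{
  char *p = mempcpy (dst, a, na);   /* P points just past A's copy.  */
  p = mempcpy (p, b, nb);           /* Append B without recomputing offsets.  */
  return p;                         /* One past the last byte written.  */
}
#endif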
3527 /* Expand expression EXP, which is a call to the memmove builtin. Return
3528 NULL_RTX if we failed; the caller should emit a normal call. */
3530 static rtx
3531 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3533 if (!validate_arglist (exp,
3534 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3535 return NULL_RTX;
3536 else
3538 tree dest = CALL_EXPR_ARG (exp, 0);
3539 tree src = CALL_EXPR_ARG (exp, 1);
3540 tree len = CALL_EXPR_ARG (exp, 2);
3541 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3542 target, mode, ignore);
3546 /* Helper function to do the actual work for expand_builtin_memmove. The
3547 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3548 so that this can also be called without constructing an actual CALL_EXPR.
3549 TYPE is the return type of the call. The other arguments and return value
3550 are the same as for expand_builtin_memmove. */
3552 static rtx
3553 expand_builtin_memmove_args (tree dest, tree src, tree len,
3554 tree type, rtx target, enum machine_mode mode,
3555 int ignore)
3557 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3559 if (result)
3561 STRIP_TYPE_NOPS (result);
3562 while (TREE_CODE (result) == COMPOUND_EXPR)
3564 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3565 EXPAND_NORMAL);
3566 result = TREE_OPERAND (result, 1);
3568 return expand_expr (result, target, mode, EXPAND_NORMAL);
3571 /* Otherwise, call the normal function. */
3572 return NULL_RTX;
3575 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3576 NULL_RTX if we failed; the caller should emit a normal call. */
3578 static rtx
3579 expand_builtin_bcopy (tree exp, int ignore)
3581 tree type = TREE_TYPE (exp);
3582 tree src, dest, size;
3584 if (!validate_arglist (exp,
3585 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3586 return NULL_RTX;
3588 src = CALL_EXPR_ARG (exp, 0);
3589 dest = CALL_EXPR_ARG (exp, 1);
3590 size = CALL_EXPR_ARG (exp, 2);
3592 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3593 This is done this way so that if it isn't expanded inline, we fall
3594 back to calling bcopy instead of memmove. */
3595 return expand_builtin_memmove_args (dest, src,
3596 fold_convert (sizetype, size),
3597 type, const0_rtx, VOIDmode,
3598 ignore);
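/* Editorial illustration (not part of the original file): the
   transformation described above, written at the source level.  Note
   that bcopy takes (src, dest, len) while memmove takes (dest, src, len).
   Hypothetical example_* name; guarded out of the build.  */
#if 0
#include <string.h>

void
example_bcopy_as_memmove (const void *src, void *dst, size_t n)
{
  /* bcopy (src, dst, n) is equivalent to: */
  memmove (dst, src, n);
}
#endif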
3601 #ifndef HAVE_movstr
3602 # define HAVE_movstr 0
3603 # define CODE_FOR_movstr CODE_FOR_nothing
3604 #endif
3606 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3607 we failed; the caller should emit a normal call, otherwise try to
3608 get the result in TARGET, if convenient. If ENDP is 0 return the
3609 destination pointer, if ENDP is 1 return the end pointer ala
3610 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3611 stpcpy. */
3613 static rtx
3614 expand_movstr (tree dest, tree src, rtx target, int endp)
3616 rtx end;
3617 rtx dest_mem;
3618 rtx src_mem;
3619 rtx insn;
3620 const struct insn_data * data;
3622 if (!HAVE_movstr)
3623 return NULL_RTX;
3625 dest_mem = get_memory_rtx (dest, NULL);
3626 src_mem = get_memory_rtx (src, NULL);
3627 if (!endp)
3629 target = force_reg (Pmode, XEXP (dest_mem, 0));
3630 dest_mem = replace_equiv_address (dest_mem, target);
3631 end = gen_reg_rtx (Pmode);
3633 else
3635 if (target == 0 || target == const0_rtx)
3637 end = gen_reg_rtx (Pmode);
3638 if (target == 0)
3639 target = end;
3641 else
3642 end = target;
3645 data = insn_data + CODE_FOR_movstr;
3647 if (data->operand[0].mode != VOIDmode)
3648 end = gen_lowpart (data->operand[0].mode, end);
3650 insn = data->genfun (end, dest_mem, src_mem);
3652 gcc_assert (insn);
3654 emit_insn (insn);
3656 /* movstr is supposed to set end to the address of the NUL
3657 terminator. If the caller requested a mempcpy-like return value,
3658 adjust it. */
3659 if (endp == 1 && target != const0_rtx)
3661 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3662 emit_move_insn (target, force_operand (tem, NULL_RTX));
3665 return target;
3668 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3669 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3670 try to get the result in TARGET, if convenient (and in mode MODE if that's
3671 convenient). */
3673 static rtx
3674 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3676 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3678 tree dest = CALL_EXPR_ARG (exp, 0);
3679 tree src = CALL_EXPR_ARG (exp, 1);
3680 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3682 return NULL_RTX;
3685 /* Helper function to do the actual work for expand_builtin_strcpy. The
3686 arguments to the builtin_strcpy call DEST and SRC are broken out
3687 so that this can also be called without constructing an actual CALL_EXPR.
3688 The other arguments and return value are the same as for
3689 expand_builtin_strcpy. */
3691 static rtx
3692 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3693 rtx target, enum machine_mode mode)
3695 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3696 if (result)
3697 return expand_expr (result, target, mode, EXPAND_NORMAL);
3698 return expand_movstr (dest, src, target, /*endp=*/0);
3702 /* Expand a call EXP to the stpcpy builtin.
3703 Return NULL_RTX if we failed; the caller should emit a normal call,
3704 otherwise try to get the result in TARGET, if convenient (and in
3705 mode MODE if that's convenient). */
3707 static rtx
3708 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3710 tree dst, src;
3712 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3713 return NULL_RTX;
3715 dst = CALL_EXPR_ARG (exp, 0);
3716 src = CALL_EXPR_ARG (exp, 1);
3718 /* If return value is ignored, transform stpcpy into strcpy. */
3719 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3721 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3722 tree result = build_call_expr (fn, 2, dst, src);
3724 STRIP_NOPS (result);
3725 while (TREE_CODE (result) == COMPOUND_EXPR)
3727 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3728 EXPAND_NORMAL);
3729 result = TREE_OPERAND (result, 1);
3731 return expand_expr (result, target, mode, EXPAND_NORMAL);
3733 else
3735 tree len, lenp1;
3736 rtx ret;
3738 /* Ensure we get an actual string whose length can be evaluated at
3739 compile-time, not an expression containing a string. This is
3740 because the latter will potentially produce pessimized code
3741 when used to produce the return value. */
3742 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3743 return expand_movstr (dst, src, target, /*endp=*/2);
3745 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3746 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3747 target, mode, /*endp=*/2);
3749 if (ret)
3750 return ret;
3752 if (TREE_CODE (len) == INTEGER_CST)
3754 rtx len_rtx = expand_normal (len);
3756 if (GET_CODE (len_rtx) == CONST_INT)
3758 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3759 dst, src, target, mode);
3761 if (ret)
3763 if (! target)
3765 if (mode != VOIDmode)
3766 target = gen_reg_rtx (mode);
3767 else
3768 target = gen_reg_rtx (GET_MODE (ret));
3770 if (GET_MODE (target) != GET_MODE (ret))
3771 ret = gen_lowpart (GET_MODE (target), ret);
3773 ret = plus_constant (ret, INTVAL (len_rtx));
3774 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3775 gcc_assert (ret);
3777 return target;
3782 return expand_movstr (dst, src, target, /*endp=*/2);
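/* Editorial illustration (not part of the original file): stpcpy copies
   SRC including its NUL and returns DST + strlen (SRC), i.e. a pointer
   to the NUL it wrote.  That is why the expansion above goes through
   mempcpy with length strlen (SRC) + 1 and endp == 2 (end pointer minus
   one).  Hypothetical example_* name; guarded out of the build.  */
#if 0
#include <string.h>

size_t
example_stpcpy_length (char *dst, const char *src)
{
  char *end = stpcpy (dst, src);
  return (size_t) (end - dst);      /* Equals strlen (src).  */
}
#endif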
3786 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3787 bytes from constant string DATA + OFFSET and return it as target
3788 constant. */
3790 static rtx
3791 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3792 enum machine_mode mode)
3794 const char *str = (const char *) data;
3796 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3797 return const0_rtx;
3799 return c_readstr (str + offset, mode);
3802 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3803 NULL_RTX if we failed; the caller should emit a normal call. */
3805 static rtx
3806 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3808 tree fndecl = get_callee_fndecl (exp);
3810 if (validate_arglist (exp,
3811 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3813 tree dest = CALL_EXPR_ARG (exp, 0);
3814 tree src = CALL_EXPR_ARG (exp, 1);
3815 tree len = CALL_EXPR_ARG (exp, 2);
3816 tree slen = c_strlen (src, 1);
3817 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3819 if (result)
3821 while (TREE_CODE (result) == COMPOUND_EXPR)
3823 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3824 EXPAND_NORMAL);
3825 result = TREE_OPERAND (result, 1);
3827 return expand_expr (result, target, mode, EXPAND_NORMAL);
3830 /* We must be passed a constant len and src parameter. */
3831 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3832 return NULL_RTX;
3834 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3836 /* We're required to pad with trailing zeros if the requested
3837 len is greater than strlen(s2)+1. In that case try to
3838 use store_by_pieces; if it fails, punt. */
3839 if (tree_int_cst_lt (slen, len))
3841 unsigned int dest_align
3842 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3843 const char *p = c_getstr (src);
3844 rtx dest_mem;
3846 if (!p || dest_align == 0 || !host_integerp (len, 1)
3847 || !can_store_by_pieces (tree_low_cst (len, 1),
3848 builtin_strncpy_read_str,
3849 CONST_CAST (char *, p),
3850 dest_align, false))
3851 return NULL_RTX;
3853 dest_mem = get_memory_rtx (dest, len);
3854 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3855 builtin_strncpy_read_str,
3856 CONST_CAST (char *, p), dest_align, false, 0);
3857 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3858 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3859 return dest_mem;
3862 return NULL_RTX;
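/* Editorial illustration (not part of the original file): strncpy must
   pad with NUL bytes whenever LEN exceeds strlen (SRC), so the case
   handled above stores the full LEN bytes (string plus zero padding)
   by pieces.  Hypothetical example_* name; guarded out of the build.  */
#if 0
#include <string.h>

void
example_strncpy_padding (char buf[8])
{
  /* strlen ("ab") + 1 == 3 < 8: bytes 2..7 of BUF are set to '\0',
     so the expansion is eight bytes of constant data.  */
  strncpy (buf, "ab", 8);
}
#endif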
3865 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3866 bytes from constant string DATA + OFFSET and return it as target
3867 constant. */
3869 static rtx
3870 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3871 enum machine_mode mode)
3873 const char *c = (const char *) data;
3874 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3876 memset (p, *c, GET_MODE_SIZE (mode));
3878 return c_readstr (p, mode);
3881 /* Callback routine for store_by_pieces. Return the RTL of a register
3882 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3883 char value given in the RTL register data. For example, if mode is
3884 4 bytes wide, return the RTL for 0x01010101*data. */
3886 static rtx
3887 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3888 enum machine_mode mode)
3890 rtx target, coeff;
3891 size_t size;
3892 char *p;
3894 size = GET_MODE_SIZE (mode);
3895 if (size == 1)
3896 return (rtx) data;
3898 p = XALLOCAVEC (char, size);
3899 memset (p, 1, size);
3900 coeff = c_readstr (p, mode);
3902 target = convert_to_mode (mode, (rtx) data, 1);
3903 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3904 return force_reg (mode, target);
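/* Editorial illustration (not part of the original file): the same
   byte-replication trick as builtin_memset_gen_str, but in plain C on a
   fixed 32-bit word -- multiplying the fill byte by 0x01010101 smears it
   into every byte position.  Hypothetical name; guarded out.  */
#if 0
#include <stdint.h>

static uint32_t
example_replicate_byte (unsigned char c)
{
  return (uint32_t) c * 0x01010101u;   /* 0xab -> 0xabababab.  */
}
#endif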
3907 /* Expand expression EXP, which is a call to the memset builtin. Return
3908 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3909 try to get the result in TARGET, if convenient (and in mode MODE if that's
3910 convenient). */
3912 static rtx
3913 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3915 if (!validate_arglist (exp,
3916 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3917 return NULL_RTX;
3918 else
3920 tree dest = CALL_EXPR_ARG (exp, 0);
3921 tree val = CALL_EXPR_ARG (exp, 1);
3922 tree len = CALL_EXPR_ARG (exp, 2);
3923 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3927 /* Helper function to do the actual work for expand_builtin_memset. The
3928 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3929 so that this can also be called without constructing an actual CALL_EXPR.
3930 The other arguments and return value are the same as for
3931 expand_builtin_memset. */
3933 static rtx
3934 expand_builtin_memset_args (tree dest, tree val, tree len,
3935 rtx target, enum machine_mode mode, tree orig_exp)
3937 tree fndecl, fn;
3938 enum built_in_function fcode;
3939 char c;
3940 unsigned int dest_align;
3941 rtx dest_mem, dest_addr, len_rtx;
3942 HOST_WIDE_INT expected_size = -1;
3943 unsigned int expected_align = 0;
3944 tree_ann_common_t ann;
3946 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3948 /* If DEST is not a pointer type, don't do this operation in-line. */
3949 if (dest_align == 0)
3950 return NULL_RTX;
3952 ann = tree_common_ann (orig_exp);
3953 if (ann)
3954 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3956 if (expected_align < dest_align)
3957 expected_align = dest_align;
3959 /* If the LEN parameter is zero, return DEST. */
3960 if (integer_zerop (len))
3962 /* Evaluate and ignore VAL in case it has side-effects. */
3963 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3964 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3967 /* Stabilize the arguments in case we fail. */
3968 dest = builtin_save_expr (dest);
3969 val = builtin_save_expr (val);
3970 len = builtin_save_expr (len);
3972 len_rtx = expand_normal (len);
3973 dest_mem = get_memory_rtx (dest, len);
3975 if (TREE_CODE (val) != INTEGER_CST)
3977 rtx val_rtx;
3979 val_rtx = expand_normal (val);
3980 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3981 val_rtx, 0);
3983 /* Assume that we can memset by pieces if we can store
3984 the coefficients by pieces (in the required modes).
3985 We can't pass builtin_memset_gen_str as that emits RTL. */
3986 c = 1;
3987 if (host_integerp (len, 1)
3988 && can_store_by_pieces (tree_low_cst (len, 1),
3989 builtin_memset_read_str, &c, dest_align,
3990 true))
3992 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3993 val_rtx);
3994 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3995 builtin_memset_gen_str, val_rtx, dest_align,
3996 true, 0);
3998 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3999 dest_align, expected_align,
4000 expected_size))
4001 goto do_libcall;
4003 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4004 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4005 return dest_mem;
4008 if (target_char_cast (val, &c))
4009 goto do_libcall;
4011 if (c)
4013 if (host_integerp (len, 1)
4014 && can_store_by_pieces (tree_low_cst (len, 1),
4015 builtin_memset_read_str, &c, dest_align,
4016 true))
4017 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4018 builtin_memset_read_str, &c, dest_align, true, 0);
4019 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4020 dest_align, expected_align,
4021 expected_size))
4022 goto do_libcall;
4024 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4025 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4026 return dest_mem;
4029 set_mem_align (dest_mem, dest_align);
4030 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4031 CALL_EXPR_TAILCALL (orig_exp)
4032 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4033 expected_align, expected_size);
4035 if (dest_addr == 0)
4037 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4038 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4041 return dest_addr;
4043 do_libcall:
4044 fndecl = get_callee_fndecl (orig_exp);
4045 fcode = DECL_FUNCTION_CODE (fndecl);
4046 if (fcode == BUILT_IN_MEMSET)
4047 fn = build_call_expr (fndecl, 3, dest, val, len);
4048 else if (fcode == BUILT_IN_BZERO)
4049 fn = build_call_expr (fndecl, 2, dest, len);
4050 else
4051 gcc_unreachable ();
4052 if (TREE_CODE (fn) == CALL_EXPR)
4053 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4054 return expand_call (fn, target, target == const0_rtx);
4057 /* Expand expression EXP, which is a call to the bzero builtin. Return
4058 NULL_RTX if we failed; the caller should emit a normal call. */
4060 static rtx
4061 expand_builtin_bzero (tree exp)
4063 tree dest, size;
4065 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4066 return NULL_RTX;
4068 dest = CALL_EXPR_ARG (exp, 0);
4069 size = CALL_EXPR_ARG (exp, 1);
4071 /* Transform bzero(ptr x, int y) into
4072 memset(ptr x, int 0, size_t y). This is done so that
4073 if it isn't expanded inline, we fall back to
4074 calling bzero instead of memset. */
4076 return expand_builtin_memset_args (dest, integer_zero_node,
4077 fold_convert (sizetype, size),
4078 const0_rtx, VOIDmode, exp);
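/* Editorial illustration (not part of the original file): the bzero
   rewrite described above, at the source level.  Hypothetical example_*
   name; guarded out of the build.  */
#if 0
#include <string.h>

void
example_bzero_as_memset (void *p, size_t n)
{
  /* bzero (p, n) becomes: */
  memset (p, 0, n);
}
#endif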
4081 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed; the
4082 caller should emit a normal call, otherwise try to get the result
4083 in TARGET, if convenient (and in mode MODE if that's convenient). */
4085 static rtx
4086 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4088 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4089 INTEGER_TYPE, VOID_TYPE))
4091 tree type = TREE_TYPE (exp);
4092 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4093 CALL_EXPR_ARG (exp, 1),
4094 CALL_EXPR_ARG (exp, 2), type);
4095 if (result)
4096 return expand_expr (result, target, mode, EXPAND_NORMAL);
4098 return NULL_RTX;
4101 /* Expand expression EXP, which is a call to the memcmp built-in function.
4102 Return NULL_RTX if we failed and the
4103 caller should emit a normal call, otherwise try to get the result in
4104 TARGET, if convenient (and in mode MODE, if that's convenient). */
4106 static rtx
4107 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4109 if (!validate_arglist (exp,
4110 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4111 return NULL_RTX;
4112 else
4114 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4115 CALL_EXPR_ARG (exp, 1),
4116 CALL_EXPR_ARG (exp, 2));
4117 if (result)
4118 return expand_expr (result, target, mode, EXPAND_NORMAL);
4121 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4123 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4124 rtx result;
4125 rtx insn;
4126 tree arg1 = CALL_EXPR_ARG (exp, 0);
4127 tree arg2 = CALL_EXPR_ARG (exp, 1);
4128 tree len = CALL_EXPR_ARG (exp, 2);
4130 int arg1_align
4131 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4132 int arg2_align
4133 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4134 enum machine_mode insn_mode;
4136 #ifdef HAVE_cmpmemsi
4137 if (HAVE_cmpmemsi)
4138 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4139 else
4140 #endif
4141 #ifdef HAVE_cmpstrnsi
4142 if (HAVE_cmpstrnsi)
4143 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4144 else
4145 #endif
4146 return NULL_RTX;
4148 /* If we don't have POINTER_TYPE, call the function. */
4149 if (arg1_align == 0 || arg2_align == 0)
4150 return NULL_RTX;
4152 /* Make a place to write the result of the instruction. */
4153 result = target;
4154 if (! (result != 0
4155 && REG_P (result) && GET_MODE (result) == insn_mode
4156 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4157 result = gen_reg_rtx (insn_mode);
4159 arg1_rtx = get_memory_rtx (arg1, len);
4160 arg2_rtx = get_memory_rtx (arg2, len);
4161 arg3_rtx = expand_normal (len);
4163 /* Set MEM_SIZE as appropriate. */
4164 if (GET_CODE (arg3_rtx) == CONST_INT)
4166 set_mem_size (arg1_rtx, arg3_rtx);
4167 set_mem_size (arg2_rtx, arg3_rtx);
4170 #ifdef HAVE_cmpmemsi
4171 if (HAVE_cmpmemsi)
4172 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4173 GEN_INT (MIN (arg1_align, arg2_align)));
4174 else
4175 #endif
4176 #ifdef HAVE_cmpstrnsi
4177 if (HAVE_cmpstrnsi)
4178 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4179 GEN_INT (MIN (arg1_align, arg2_align)));
4180 else
4181 #endif
4182 gcc_unreachable ();
4184 if (insn)
4185 emit_insn (insn);
4186 else
4187 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4188 TYPE_MODE (integer_type_node), 3,
4189 XEXP (arg1_rtx, 0), Pmode,
4190 XEXP (arg2_rtx, 0), Pmode,
4191 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4192 TYPE_UNSIGNED (sizetype)),
4193 TYPE_MODE (sizetype));
4195 /* Return the value in the proper mode for this function. */
4196 mode = TYPE_MODE (TREE_TYPE (exp));
4197 if (GET_MODE (result) == mode)
4198 return result;
4199 else if (target != 0)
4201 convert_move (target, result, 0);
4202 return target;
4204 else
4205 return convert_to_mode (mode, result, 0);
4207 #endif
4209 return NULL_RTX;
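/* Editorial illustration (not part of the original file): memcmp compares
   exactly LEN bytes, embedded NULs included, which is why the expander
   above can hand the constant length straight to the cmpmemsi/cmpstrnsi
   pattern and set MEM_SIZE on both operands.  Hypothetical name;
   guarded out of the build.  */
#if 0
#include <string.h>

int
example_memcmp_embedded_nul (void)
{
  /* Differs at byte 2 ('b' vs 'c'), even though byte 1 is '\0'.  */
  return memcmp ("a\0b", "a\0c", 3);   /* Negative result.  */
}
#endif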
4212 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4213 if we failed; the caller should emit a normal call, otherwise try to get
4214 the result in TARGET, if convenient. */
4216 static rtx
4217 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4219 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4220 return NULL_RTX;
4221 else
4223 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4224 CALL_EXPR_ARG (exp, 1));
4225 if (result)
4226 return expand_expr (result, target, mode, EXPAND_NORMAL);
4229 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4230 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4231 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4233 rtx arg1_rtx, arg2_rtx;
4234 rtx result, insn = NULL_RTX;
4235 tree fndecl, fn;
4236 tree arg1 = CALL_EXPR_ARG (exp, 0);
4237 tree arg2 = CALL_EXPR_ARG (exp, 1);
4239 int arg1_align
4240 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4241 int arg2_align
4242 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4244 /* If we don't have POINTER_TYPE, call the function. */
4245 if (arg1_align == 0 || arg2_align == 0)
4246 return NULL_RTX;
4248 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4249 arg1 = builtin_save_expr (arg1);
4250 arg2 = builtin_save_expr (arg2);
4252 arg1_rtx = get_memory_rtx (arg1, NULL);
4253 arg2_rtx = get_memory_rtx (arg2, NULL);
4255 #ifdef HAVE_cmpstrsi
4256 /* Try to call cmpstrsi. */
4257 if (HAVE_cmpstrsi)
4259 enum machine_mode insn_mode
4260 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4262 /* Make a place to write the result of the instruction. */
4263 result = target;
4264 if (! (result != 0
4265 && REG_P (result) && GET_MODE (result) == insn_mode
4266 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4267 result = gen_reg_rtx (insn_mode);
4269 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4270 GEN_INT (MIN (arg1_align, arg2_align)));
4272 #endif
4273 #ifdef HAVE_cmpstrnsi
4274 /* Try to determine at least one length and call cmpstrnsi. */
4275 if (!insn && HAVE_cmpstrnsi)
4277 tree len;
4278 rtx arg3_rtx;
4280 enum machine_mode insn_mode
4281 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4282 tree len1 = c_strlen (arg1, 1);
4283 tree len2 = c_strlen (arg2, 1);
4285 if (len1)
4286 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4287 if (len2)
4288 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4290 /* If we don't have a constant length for the first, use the length
4291 of the second, if we know it. We don't require a constant for
4292 this case; some cost analysis could be done if both are available
4293 but neither is constant. For now, assume they're equally cheap,
4294 unless one has side effects. If both strings have constant lengths,
4295 use the smaller. */
4297 if (!len1)
4298 len = len2;
4299 else if (!len2)
4300 len = len1;
4301 else if (TREE_SIDE_EFFECTS (len1))
4302 len = len2;
4303 else if (TREE_SIDE_EFFECTS (len2))
4304 len = len1;
4305 else if (TREE_CODE (len1) != INTEGER_CST)
4306 len = len2;
4307 else if (TREE_CODE (len2) != INTEGER_CST)
4308 len = len1;
4309 else if (tree_int_cst_lt (len1, len2))
4310 len = len1;
4311 else
4312 len = len2;
4314 /* If both arguments have side effects, we cannot optimize. */
4315 if (!len || TREE_SIDE_EFFECTS (len))
4316 goto do_libcall;
4318 arg3_rtx = expand_normal (len);
4320 /* Make a place to write the result of the instruction. */
4321 result = target;
4322 if (! (result != 0
4323 && REG_P (result) && GET_MODE (result) == insn_mode
4324 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4325 result = gen_reg_rtx (insn_mode);
4327 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4328 GEN_INT (MIN (arg1_align, arg2_align)));
4330 #endif
4332 if (insn)
4334 emit_insn (insn);
4336 /* Return the value in the proper mode for this function. */
4337 mode = TYPE_MODE (TREE_TYPE (exp));
4338 if (GET_MODE (result) == mode)
4339 return result;
4340 if (target == 0)
4341 return convert_to_mode (mode, result, 0);
4342 convert_move (target, result, 0);
4343 return target;
4346 /* Expand the library call ourselves using a stabilized argument
4347 list to avoid re-evaluating the function's arguments twice. */
4348 #ifdef HAVE_cmpstrnsi
4349 do_libcall:
4350 #endif
4351 fndecl = get_callee_fndecl (exp);
4352 fn = build_call_expr (fndecl, 2, arg1, arg2);
4353 if (TREE_CODE (fn) == CALL_EXPR)
4354 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4355 return expand_call (fn, target, target == const0_rtx);
4357 #endif
4358 return NULL_RTX;
4361 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4362 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4363 the result in TARGET, if convenient. */
4365 static rtx
4366 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4368 if (!validate_arglist (exp,
4369 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4370 return NULL_RTX;
4371 else
4373 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4374 CALL_EXPR_ARG (exp, 1),
4375 CALL_EXPR_ARG (exp, 2));
4376 if (result)
4377 return expand_expr (result, target, mode, EXPAND_NORMAL);
4380 /* If c_strlen can determine an expression for one of the string
4381 lengths, and it doesn't have side effects, then emit cmpstrnsi
4382 using length MIN(strlen(string)+1, arg3). */
4383 #ifdef HAVE_cmpstrnsi
4384 if (HAVE_cmpstrnsi)
4386 tree len, len1, len2;
4387 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4388 rtx result, insn;
4389 tree fndecl, fn;
4390 tree arg1 = CALL_EXPR_ARG (exp, 0);
4391 tree arg2 = CALL_EXPR_ARG (exp, 1);
4392 tree arg3 = CALL_EXPR_ARG (exp, 2);
4394 int arg1_align
4395 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4396 int arg2_align
4397 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4398 enum machine_mode insn_mode
4399 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4401 len1 = c_strlen (arg1, 1);
4402 len2 = c_strlen (arg2, 1);
4404 if (len1)
4405 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4406 if (len2)
4407 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4409 /* If we don't have a constant length for the first, use the length
4410 of the second, if we know it. We don't require a constant for
4411 this case; some cost analysis could be done if both are available
4412 but neither is constant. For now, assume they're equally cheap,
4413 unless one has side effects. If both strings have constant lengths,
4414 use the smaller. */
4416 if (!len1)
4417 len = len2;
4418 else if (!len2)
4419 len = len1;
4420 else if (TREE_SIDE_EFFECTS (len1))
4421 len = len2;
4422 else if (TREE_SIDE_EFFECTS (len2))
4423 len = len1;
4424 else if (TREE_CODE (len1) != INTEGER_CST)
4425 len = len2;
4426 else if (TREE_CODE (len2) != INTEGER_CST)
4427 len = len1;
4428 else if (tree_int_cst_lt (len1, len2))
4429 len = len1;
4430 else
4431 len = len2;
4433 /* If both arguments have side effects, we cannot optimize. */
4434 if (!len || TREE_SIDE_EFFECTS (len))
4435 return NULL_RTX;
4437 /* The actual new length parameter is MIN(len,arg3). */
4438 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4439 fold_convert (TREE_TYPE (len), arg3));
4441 /* If we don't have POINTER_TYPE, call the function. */
4442 if (arg1_align == 0 || arg2_align == 0)
4443 return NULL_RTX;
4445 /* Make a place to write the result of the instruction. */
4446 result = target;
4447 if (! (result != 0
4448 && REG_P (result) && GET_MODE (result) == insn_mode
4449 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4450 result = gen_reg_rtx (insn_mode);
4452 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4453 arg1 = builtin_save_expr (arg1);
4454 arg2 = builtin_save_expr (arg2);
4455 len = builtin_save_expr (len);
4457 arg1_rtx = get_memory_rtx (arg1, len);
4458 arg2_rtx = get_memory_rtx (arg2, len);
4459 arg3_rtx = expand_normal (len);
4460 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4461 GEN_INT (MIN (arg1_align, arg2_align)));
4462 if (insn)
4464 emit_insn (insn);
4466 /* Return the value in the proper mode for this function. */
4467 mode = TYPE_MODE (TREE_TYPE (exp));
4468 if (GET_MODE (result) == mode)
4469 return result;
4470 if (target == 0)
4471 return convert_to_mode (mode, result, 0);
4472 convert_move (target, result, 0);
4473 return target;
4476 /* Expand the library call ourselves using a stabilized argument
4477 list to avoid re-evaluating the function's arguments twice. */
4478 fndecl = get_callee_fndecl (exp);
4479 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4480 if (TREE_CODE (fn) == CALL_EXPR)
4481 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4482 return expand_call (fn, target, target == const0_rtx);
4484 #endif
4485 return NULL_RTX;
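/* Editorial illustration (not part of the original file): when one
   operand's length is known, at most MIN (strlen (s) + 1, N) bytes can
   influence the result, which is the reduced length the code above
   feeds to cmpstrnsi.  Hypothetical name; guarded out of the build.  */
#if 0
#include <string.h>

int
example_strncmp_bound (const char *s)
{
  /* strlen ("hi") + 1 == 3, so only MIN (3, 100) == 3 bytes of S are
     ever inspected.  */
  return strncmp (s, "hi", 100);
}
#endif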
4488 /* Expand expression EXP, which is a call to the strcat builtin.
4489 Return NULL_RTX if we failed; the caller should emit a normal call,
4490 otherwise try to get the result in TARGET, if convenient. */
4492 static rtx
4493 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4495 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4496 return NULL_RTX;
4497 else
4499 tree dst = CALL_EXPR_ARG (exp, 0);
4500 tree src = CALL_EXPR_ARG (exp, 1);
4501 const char *p = c_getstr (src);
4503 /* If the string length is zero, return the dst parameter. */
4504 if (p && *p == '\0')
4505 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4507 if (optimize_insn_for_speed_p ())
4509 /* See if we can store by pieces into (dst + strlen(dst)). */
4510 tree newsrc, newdst,
4511 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4512 rtx insns;
4514 /* Stabilize the argument list. */
4515 newsrc = builtin_save_expr (src);
4516 dst = builtin_save_expr (dst);
4518 start_sequence ();
4520 /* Create strlen (dst). */
4521 newdst = build_call_expr (strlen_fn, 1, dst);
4522 /* Create (dst p+ strlen (dst)). */
4524 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4525 newdst = builtin_save_expr (newdst);
4527 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4529 end_sequence (); /* Stop sequence. */
4530 return NULL_RTX;
4533 /* Output the entire sequence. */
4534 insns = get_insns ();
4535 end_sequence ();
4536 emit_insn (insns);
4538 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4541 return NULL_RTX;
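/* Editorial illustration (not part of the original file): the strcat
   expansion above is essentially a strcpy into DST + strlen (DST),
   with DST itself as the return value.  Hypothetical example_* name;
   guarded out of the build.  */
#if 0
#include <string.h>

char *
example_strcat_expansion (char *dst, const char *src)
{
  strcpy (dst + strlen (dst), src);
  return dst;                       /* strcat returns its first argument.  */
}
#endif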
4545 /* Expand expression EXP, which is a call to the strncat builtin.
4546 Return NULL_RTX if we failed; the caller should emit a normal call,
4547 otherwise try to get the result in TARGET, if convenient. */
4549 static rtx
4550 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4552 if (validate_arglist (exp,
4553 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4555 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4556 CALL_EXPR_ARG (exp, 1),
4557 CALL_EXPR_ARG (exp, 2));
4558 if (result)
4559 return expand_expr (result, target, mode, EXPAND_NORMAL);
4561 return NULL_RTX;
4564 /* Expand expression EXP, which is a call to the strspn builtin.
4565 Return NULL_RTX if we failed; the caller should emit a normal call,
4566 otherwise try to get the result in TARGET, if convenient. */
4568 static rtx
4569 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4571 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4573 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4574 CALL_EXPR_ARG (exp, 1));
4575 if (result)
4576 return expand_expr (result, target, mode, EXPAND_NORMAL);
4578 return NULL_RTX;
4581 /* Expand expression EXP, which is a call to the strcspn builtin.
4582 Return NULL_RTX if we failed; the caller should emit a normal call,
4583 otherwise try to get the result in TARGET, if convenient. */
4585 static rtx
4586 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4588 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4590 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4591 CALL_EXPR_ARG (exp, 1));
4592 if (result)
4593 return expand_expr (result, target, mode, EXPAND_NORMAL);
4595 return NULL_RTX;
4598 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4599 if that's convenient. */
4601 rtx
4602 expand_builtin_saveregs (void)
4604 rtx val, seq;
4606 /* Don't do __builtin_saveregs more than once in a function.
4607 Save the result of the first call and reuse it. */
4608 if (saveregs_value != 0)
4609 return saveregs_value;
4611 /* When this function is called, it means that registers must be
4612 saved on entry to this function. So we migrate the call to the
4613 first insn of this function. */
4615 start_sequence ();
4617 /* Do whatever the machine needs done in this case. */
4618 val = targetm.calls.expand_builtin_saveregs ();
4620 seq = get_insns ();
4621 end_sequence ();
4623 saveregs_value = val;
4625 /* Put the insns after the NOTE that starts the function. If this
4626 is inside a start_sequence, make the outer-level insn chain current, so
4627 the code is placed at the start of the function. */
4628 push_topmost_sequence ();
4629 emit_insn_after (seq, entry_of_function ());
4630 pop_topmost_sequence ();
4632 return val;
4635 /* __builtin_args_info (N) returns word N of the arg space info
4636 for the current function. The number and meanings of words
4637 is controlled by the definition of CUMULATIVE_ARGS. */
4639 static rtx
4640 expand_builtin_args_info (tree exp)
4642 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4643 int *word_ptr = (int *) &crtl->args.info;
4645 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4647 if (call_expr_nargs (exp) != 0)
4649 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4650 error ("argument of %<__builtin_args_info%> must be constant");
4651 else
4653 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4655 if (wordnum < 0 || wordnum >= nwords)
4656 error ("argument of %<__builtin_args_info%> out of range");
4657 else
4658 return GEN_INT (word_ptr[wordnum]);
4661 else
4662 error ("missing argument in %<__builtin_args_info%>");
4664 return const0_rtx;
4667 /* Expand a call to __builtin_next_arg. */
4669 static rtx
4670 expand_builtin_next_arg (void)
4672 /* Checking the arguments is already done in fold_builtin_next_arg,
4673 which must be called before this function. */
4674 return expand_binop (ptr_mode, add_optab,
4675 crtl->args.internal_arg_pointer,
4676 crtl->args.arg_offset_rtx,
4677 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4680 /* Make it easier for the backends by protecting the valist argument
4681 from multiple evaluations. */
4683 static tree
4684 stabilize_va_list (tree valist, int needs_lvalue)
4686 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4688 gcc_assert (vatype != NULL_TREE);
4690 if (TREE_CODE (vatype) == ARRAY_TYPE)
4692 if (TREE_SIDE_EFFECTS (valist))
4693 valist = save_expr (valist);
4695 /* For this case, the backends will be expecting a pointer to
4696 vatype, but it's possible we've actually been given an array
4697 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4698 So fix it. */
4699 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4701 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4702 valist = build_fold_addr_expr_with_type (valist, p1);
4705 else
4707 tree pt;
4709 if (! needs_lvalue)
4711 if (! TREE_SIDE_EFFECTS (valist))
4712 return valist;
4714 pt = build_pointer_type (vatype);
4715 valist = fold_build1 (ADDR_EXPR, pt, valist);
4716 TREE_SIDE_EFFECTS (valist) = 1;
4719 if (TREE_SIDE_EFFECTS (valist))
4720 valist = save_expr (valist);
4721 valist = build_fold_indirect_ref (valist);
4724 return valist;
4727 /* The "standard" definition of va_list is void*. */
4729 tree
4730 std_build_builtin_va_list (void)
4732 return ptr_type_node;
4735 /* The "standard" abi va_list is va_list_type_node. */
4737 tree
4738 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4740 return va_list_type_node;
4743 /* The "standard" type of va_list is va_list_type_node. */
4745 tree
4746 std_canonical_va_list_type (tree type)
4748 tree wtype, htype;
4750 if (INDIRECT_REF_P (type))
4751 type = TREE_TYPE (type);
4752 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4753 type = TREE_TYPE (type);
4754 wtype = va_list_type_node;
4755 htype = type;
4756 /* Handle structure va_list types. */
4757 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4758 htype = TREE_TYPE (htype);
4759 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4761 /* If va_list is an array type, the argument may have decayed
4762 to a pointer type, e.g. by being passed to another function.
4763 In that case, unwrap both types so that we can compare the
4764 underlying records. */
4765 if (TREE_CODE (htype) == ARRAY_TYPE
4766 || POINTER_TYPE_P (htype))
4768 wtype = TREE_TYPE (wtype);
4769 htype = TREE_TYPE (htype);
4772 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4773 return va_list_type_node;
4775 return NULL_TREE;
4778 /* The "standard" implementation of va_start: just assign `nextarg' to
4779 the variable. */
4781 void
4782 std_expand_builtin_va_start (tree valist, rtx nextarg)
4784 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4785 convert_move (va_r, nextarg, 0);
4788 /* Expand EXP, a call to __builtin_va_start. */
4790 static rtx
4791 expand_builtin_va_start (tree exp)
4793 rtx nextarg;
4794 tree valist;
4796 if (call_expr_nargs (exp) < 2)
4798 error ("too few arguments to function %<va_start%>");
4799 return const0_rtx;
4802 if (fold_builtin_next_arg (exp, true))
4803 return const0_rtx;
4805 nextarg = expand_builtin_next_arg ();
4806 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4808 if (targetm.expand_builtin_va_start)
4809 targetm.expand_builtin_va_start (valist, nextarg);
4810 else
4811 std_expand_builtin_va_start (valist, nextarg);
4813 return const0_rtx;
4816 /* The "standard" implementation of va_arg: read the value from the
4817 current (padded) address and increment by the (padded) size. */
4819 tree
4820 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4821 gimple_seq *post_p)
4823 tree addr, t, type_size, rounded_size, valist_tmp;
4824 unsigned HOST_WIDE_INT align, boundary;
4825 bool indirect;
4827 #ifdef ARGS_GROW_DOWNWARD
4828 /* All of the alignment and movement below is for args-grow-up machines.
4829 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4830 implement their own specialized gimplify_va_arg_expr routines. */
4831 gcc_unreachable ();
4832 #endif
4834 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4835 if (indirect)
4836 type = build_pointer_type (type);
4838 align = PARM_BOUNDARY / BITS_PER_UNIT;
4839 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4841 /* When the caller aligns a parameter on the stack and the requested
4842 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it is only
4843 aligned to MAX_SUPPORTED_STACK_ALIGNMENT. Match the callee with
4844 the caller here. */
4845 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4846 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4848 boundary /= BITS_PER_UNIT;
4850 /* Hoist the valist value into a temporary for the moment. */
4851 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4853 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4854 requires greater alignment, we must perform dynamic alignment. */
4855 if (boundary > align
4856 && !integer_zerop (TYPE_SIZE (type)))
4858 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4859 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4860 valist_tmp, size_int (boundary - 1)));
4861 gimplify_and_add (t, pre_p);
4863 t = fold_convert (sizetype, valist_tmp);
4864 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4865 fold_convert (TREE_TYPE (valist),
4866 fold_build2 (BIT_AND_EXPR, sizetype, t,
4867 size_int (-boundary))));
4868 gimplify_and_add (t, pre_p);
4870 else
4871 boundary = align;
4873 /* If the actual alignment is less than the alignment of the type,
4874 adjust the type accordingly so that we don't assume strict alignment
4875 when dereferencing the pointer. */
4876 boundary *= BITS_PER_UNIT;
4877 if (boundary < TYPE_ALIGN (type))
4879 type = build_variant_type_copy (type);
4880 TYPE_ALIGN (type) = boundary;
4883 /* Compute the rounded size of the type. */
4884 type_size = size_in_bytes (type);
4885 rounded_size = round_up (type_size, align);
4887 /* Reduce rounded_size so it's sharable with the postqueue. */
4888 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4890 /* Get AP. */
4891 addr = valist_tmp;
4892 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4894 /* Small args are padded downward. */
4895 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4896 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4897 size_binop (MINUS_EXPR, rounded_size, type_size));
4898 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4901 /* Compute new value for AP. */
4902 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4903 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4904 gimplify_and_add (t, pre_p);
4906 addr = fold_convert (build_pointer_type (type), addr);
4908 if (indirect)
4909 addr = build_va_arg_indirect_ref (addr);
4911 return build_va_arg_indirect_ref (addr);
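/* Editorial illustration (not part of the original file): the dynamic
   alignment emitted above is the usual round-up-to-power-of-two
   arithmetic, shown here in plain C on integer addresses.  Hypothetical
   name; guarded out of the build.  */
#if 0
#include <stdint.h>

static uintptr_t
example_round_up (uintptr_t addr, uintptr_t boundary /* power of two */)
{
  /* Matches the (valist + boundary - 1) & -boundary trees built above,
     e.g. example_round_up (13, 8) == 16.  */
  return (addr + boundary - 1) & -boundary;
}
#endif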
4914 /* Build an indirect-ref expression over the given TREE, which represents a
4915 piece of a va_arg() expansion. */
4916 tree
4917 build_va_arg_indirect_ref (tree addr)
4919 addr = build_fold_indirect_ref (addr);
4921 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4922 mf_mark (addr);
4924 return addr;
4927 /* Return a dummy expression of type TYPE in order to keep going after an
4928 error. */
4930 static tree
4931 dummy_object (tree type)
4933 tree t = build_int_cst (build_pointer_type (type), 0);
4934 return build1 (INDIRECT_REF, type, t);
4937 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4938 builtin function, but a very special sort of operator. */
4940 enum gimplify_status
4941 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4943 tree promoted_type, have_va_type;
4944 tree valist = TREE_OPERAND (*expr_p, 0);
4945 tree type = TREE_TYPE (*expr_p);
4946 tree t;
4948 /* Verify that valist is of the proper type. */
4949 have_va_type = TREE_TYPE (valist);
4950 if (have_va_type == error_mark_node)
4951 return GS_ERROR;
4952 have_va_type = targetm.canonical_va_list_type (have_va_type);
4954 if (have_va_type == NULL_TREE)
4956 error ("first argument to %<va_arg%> not of type %<va_list%>");
4957 return GS_ERROR;
4960 /* Generate a diagnostic for requesting data of a type that cannot
4961 be passed through `...' due to type promotion at the call site. */
4962 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4963 != type)
4965 static bool gave_help;
4966 bool warned;
4968 /* Unfortunately, this is merely undefined, rather than a constraint
4969 violation, so we cannot make this an error. If this call is never
4970 executed, the program is still strictly conforming. */
4971 warned = warning (0, "%qT is promoted to %qT when passed through %<...%>",
4972 type, promoted_type);
4973 if (!gave_help && warned)
4975 gave_help = true;
4976 inform (input_location, "(so you should pass %qT not %qT to %<va_arg%>)",
4977 promoted_type, type);
4980 /* We can, however, treat "undefined" any way we please.
4981 Call abort to encourage the user to fix the program. */
4982 if (warned)
4983 inform (input_location, "if this code is reached, the program will abort");
4984 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4985 gimplify_and_add (t, pre_p);
4987 /* This is dead code, but go ahead and finish so that the
4988 mode of the result comes out right. */
4989 *expr_p = dummy_object (type);
4990 return GS_ALL_DONE;
4992 else
4994 /* Make it easier for the backends by protecting the valist argument
4995 from multiple evaluations. */
4996 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4998 /* For this case, the backends will be expecting a pointer to
4999 TREE_TYPE (abi), but it's possible we've
5000 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5001 So fix it. */
5002 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5004 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
5005 valist = build_fold_addr_expr_with_type (valist, p1);
5008 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
5010 else
5011 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
5013 if (!targetm.gimplify_va_arg_expr)
5014 /* FIXME: Once most targets are converted we should merely
5015 assert this is non-null. */
5016 return GS_ALL_DONE;
5018 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5019 return GS_OK;
5023 /* Expand EXP, a call to __builtin_va_end. */
5025 static rtx
5026 expand_builtin_va_end (tree exp)
5028 tree valist = CALL_EXPR_ARG (exp, 0);
5030 /* Evaluate for side effects, if needed. I hate macros that don't
5031 do that. */
5032 if (TREE_SIDE_EFFECTS (valist))
5033 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5035 return const0_rtx;
5038 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5039 builtin rather than just as an assignment in stdarg.h because of the
5040 nastiness of array-type va_list types. */
5042 static rtx
5043 expand_builtin_va_copy (tree exp)
5045 tree dst, src, t;
5047 dst = CALL_EXPR_ARG (exp, 0);
5048 src = CALL_EXPR_ARG (exp, 1);
5050 dst = stabilize_va_list (dst, 1);
5051 src = stabilize_va_list (src, 0);
5053 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5055 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5057 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5058 TREE_SIDE_EFFECTS (t) = 1;
5059 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5061 else
5063 rtx dstb, srcb, size;
5065 /* Evaluate to pointers. */
5066 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5067 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5068 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5069 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5071 dstb = convert_memory_address (Pmode, dstb);
5072 srcb = convert_memory_address (Pmode, srcb);
5074 /* "Dereference" to BLKmode memories. */
5075 dstb = gen_rtx_MEM (BLKmode, dstb);
5076 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5077 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5078 srcb = gen_rtx_MEM (BLKmode, srcb);
5079 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5080 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5082 /* Copy. */
5083 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5086 return const0_rtx;
5089 /* Expand a call to one of the builtin functions __builtin_frame_address or
5090 __builtin_return_address. */
5092 static rtx
5093 expand_builtin_frame_address (tree fndecl, tree exp)
5095 /* The argument must be a nonnegative integer constant.
5096 It counts the number of frames to scan up the stack.
5097 The value is the return address saved in that frame. */
5098 if (call_expr_nargs (exp) == 0)
5099 /* Warning about missing arg was already issued. */
5100 return const0_rtx;
5101 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5103 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5104 error ("invalid argument to %<__builtin_frame_address%>");
5105 else
5106 error ("invalid argument to %<__builtin_return_address%>");
5107 return const0_rtx;
5109 else
5111 rtx tem
5112 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5113 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5115 /* Some ports cannot access arbitrary stack frames. */
5116 if (tem == NULL)
5118 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5119 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5120 else
5121 warning (0, "unsupported argument to %<__builtin_return_address%>");
5122 return const0_rtx;
5125 /* For __builtin_frame_address, return what we've got. */
5126 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5127 return tem;
5129 if (!REG_P (tem)
5130 && ! CONSTANT_P (tem))
5131 tem = copy_to_mode_reg (Pmode, tem);
5132 return tem;
5136 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5137 we failed and the caller should emit a normal call, otherwise try to get
5138 the result in TARGET, if convenient. */
5140 static rtx
5141 expand_builtin_alloca (tree exp, rtx target)
5143 rtx op0;
5144 rtx result;
5146 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5147 should always expand to function calls. These can be intercepted
5148 in libmudflap. */
5149 if (flag_mudflap)
5150 return NULL_RTX;
5152 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5153 return NULL_RTX;
5155 /* Compute the argument. */
5156 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5158 /* Allocate the desired space. */
5159 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5160 result = convert_memory_address (ptr_mode, result);
5162 return result;
5165 /* Expand EXP, a call to a bswap builtin. Place the result in TARGET
5166 if convenient; SUBTARGET may be used to compute the operand. */
5168 static rtx
5169 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5171 enum machine_mode mode;
5172 tree arg;
5173 rtx op0;
5175 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5176 return NULL_RTX;
5178 arg = CALL_EXPR_ARG (exp, 0);
5179 mode = TYPE_MODE (TREE_TYPE (arg));
5180 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5182 target = expand_unop (mode, bswap_optab, op0, target, 1);
5184 gcc_assert (target);
5186 return convert_to_mode (mode, target, 0);
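/* Editorial illustration (not part of the original file): the bswap
   builtins reverse the byte order of their operand; the expansion above
   simply maps them onto bswap_optab.  Guarded out of the build.  */
#if 0
#include <stdint.h>

static uint32_t
example_bswap32 (void)
{
  return __builtin_bswap32 (0x12345678u);   /* Yields 0x78563412.  */
}
#endif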
5189 /* Expand a call to a unary builtin in EXP.
5190 Return NULL_RTX if a normal call should be emitted rather than expanding the
5191 function in-line. If convenient, the result should be placed in TARGET.
5192 SUBTARGET may be used as the target for computing one of EXP's operands. */
5194 static rtx
5195 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5196 rtx subtarget, optab op_optab)
5198 rtx op0;
5200 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5201 return NULL_RTX;
5203 /* Compute the argument. */
5204 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5205 VOIDmode, EXPAND_NORMAL);
5206 /* Compute op, into TARGET if possible.
5207 Set TARGET to wherever the result comes back. */
5208 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5209 op_optab, op0, target, 1);
5210 gcc_assert (target);
5212 return convert_to_mode (target_mode, target, 0);
5215 /* If the string passed to fputs is a constant and is one character
5216 long, we attempt to transform this call into __builtin_fputc(). */
5218 static rtx
5219 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5221 /* Verify the arguments in the original call. */
5222 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5224 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5225 CALL_EXPR_ARG (exp, 1),
5226 (target == const0_rtx),
5227 unlocked, NULL_TREE);
5228 if (result)
5229 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5231 return NULL_RTX;
5234 /* Expand a call to __builtin_expect. We just return our argument,
5235 as the builtin_expect semantics should already have been applied
5236 by the tree branch prediction pass. */
5238 static rtx
5239 expand_builtin_expect (tree exp, rtx target)
5241 tree arg, c;
5243 if (call_expr_nargs (exp) < 2)
5244 return const0_rtx;
5245 arg = CALL_EXPR_ARG (exp, 0);
5246 c = CALL_EXPR_ARG (exp, 1);
5248 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5249 /* When guessing was done, the hints should be already stripped away. */
5250 gcc_assert (!flag_guess_branch_prob
5251 || optimize == 0 || errorcount || sorrycount);
5252 return target;
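/* Editorial illustration (not part of the original file): a typical use
   of __builtin_expect.  The probability hint is consumed earlier, by the
   tree branch prediction pass, so the expander above only has to return
   the first argument.  Hypothetical example_* name; guarded out.  */
#if 0
static int
example_expect (int x)
{
  if (__builtin_expect (x < 0, 0))   /* Hint: the error path is unlikely.  */
    return -1;
  return x;
}
#endif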
5255 void
5256 expand_builtin_trap (void)
5258 #ifdef HAVE_trap
5259 if (HAVE_trap)
5260 emit_insn (gen_trap ());
5261 else
5262 #endif
5263 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5264 emit_barrier ();
5267 /* Expand EXP, a call to fabs, fabsf or fabsl.
5268 Return NULL_RTX if a normal call should be emitted rather than expanding
5269 the function inline. If convenient, the result should be placed
5270 in TARGET. SUBTARGET may be used as the target for computing
5271 the operand. */
5273 static rtx
5274 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5276 enum machine_mode mode;
5277 tree arg;
5278 rtx op0;
5280 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5281 return NULL_RTX;
5283 arg = CALL_EXPR_ARG (exp, 0);
5284 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5285 mode = TYPE_MODE (TREE_TYPE (arg));
5286 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5287 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5290 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5291 Return NULL_RTX if a normal call should be emitted rather than expanding the
5292 function inline. If convenient, the result should be placed in TARGET.
5293 SUBTARGET may be used as the target for computing the operand. */
5295 static rtx
5296 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5298 rtx op0, op1;
5299 tree arg;
5301 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5302 return NULL_RTX;
5304 arg = CALL_EXPR_ARG (exp, 0);
5305 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5307 arg = CALL_EXPR_ARG (exp, 1);
5308 op1 = expand_normal (arg);
5310 return expand_copysign (op0, op1, target);
5313 /* Create a new constant string literal and return a char* pointer to it.
5314 The STRING_CST value is the LEN characters at STR. */
5315 tree
5316 build_string_literal (int len, const char *str)
5318 tree t, elem, index, type;
5320 t = build_string (len, str);
5321 elem = build_type_variant (char_type_node, 1, 0);
5322 index = build_index_type (size_int (len - 1));
5323 type = build_array_type (elem, index);
5324 TREE_TYPE (t) = type;
5325 TREE_CONSTANT (t) = 1;
5326 TREE_READONLY (t) = 1;
5327 TREE_STATIC (t) = 1;
5329 type = build_pointer_type (elem);
5330 t = build1 (ADDR_EXPR, type,
5331 build4 (ARRAY_REF, elem,
5332 t, integer_zero_node, NULL_TREE, NULL_TREE));
5333 return t;
5336 /* Expand EXP, a call to printf or printf_unlocked.
5337 Return NULL_RTX if a normal call should be emitted rather than transforming
5338 the function inline. If convenient, the result should be placed in
5339 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5340 call. */
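/* The transformations performed below amount to (illustration only, and
   only when the result of the call is unused):

     printf ("%s\n", s)   ==>  puts (s)
     printf ("%c", c)     ==>  putchar (c)
     printf ("x")         ==>  putchar ('x')
     printf ("abc\n")     ==>  puts ("abc")
     printf ("")          ==>  (nothing)

   Any other format containing '%' is left as a normal call.  */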
5341 static rtx
5342 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5343 bool unlocked)
5345 /* If we're using an unlocked function, assume the other unlocked
5346 functions exist explicitly. */
5347 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5348 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5349 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5350 : implicit_built_in_decls[BUILT_IN_PUTS];
5351 const char *fmt_str;
5352 tree fn = 0;
5353 tree fmt, arg;
5354 int nargs = call_expr_nargs (exp);
5356 /* If the return value is used, don't do the transformation. */
5357 if (target != const0_rtx)
5358 return NULL_RTX;
5360 /* Verify the required arguments in the original call. */
5361 if (nargs == 0)
5362 return NULL_RTX;
5363 fmt = CALL_EXPR_ARG (exp, 0);
5364 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5365 return NULL_RTX;
5367 /* Check whether the format is a literal string constant. */
5368 fmt_str = c_getstr (fmt);
5369 if (fmt_str == NULL)
5370 return NULL_RTX;
5372 if (!init_target_chars ())
5373 return NULL_RTX;
5375 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5376 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5378 if ((nargs != 2)
5379 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5380 return NULL_RTX;
5381 if (fn_puts)
5382 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5384 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5385 else if (strcmp (fmt_str, target_percent_c) == 0)
5387 if ((nargs != 2)
5388 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5389 return NULL_RTX;
5390 if (fn_putchar)
5391 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5393 else
5395 /* We can't handle anything else with % args or %% ... yet. */
5396 if (strchr (fmt_str, target_percent))
5397 return NULL_RTX;
5399 if (nargs > 1)
5400 return NULL_RTX;
5402 /* If the format specifier was "", printf does nothing. */
5403 if (fmt_str[0] == '\0')
5404 return const0_rtx;
5405 /* If the format specifier has length of 1, call putchar. */
5406 if (fmt_str[1] == '\0')
5408 /* Given printf("c"), where c is any single character,
5409 convert "c"[0] to an int and pass that to the replacement
5410 function. */
5411 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5412 if (fn_putchar)
5413 fn = build_call_expr (fn_putchar, 1, arg);
5415 else
5417 /* If the format specifier was "string\n", call puts("string"). */
5418 size_t len = strlen (fmt_str);
5419 if ((unsigned char)fmt_str[len - 1] == target_newline)
5421 /* Create a NUL-terminated string that's one char shorter
5422 than the original, stripping off the trailing '\n'. */
5423 char *newstr = XALLOCAVEC (char, len);
5424 memcpy (newstr, fmt_str, len - 1);
5425 newstr[len - 1] = 0;
5426 arg = build_string_literal (len, newstr);
5427 if (fn_puts)
5428 fn = build_call_expr (fn_puts, 1, arg);
5430 else
5431 /* We'd like to arrange to call fputs(string,stdout) here,
5432 but we need stdout and don't have a way to get it yet. */
5433 return NULL_RTX;
5437 if (!fn)
5438 return NULL_RTX;
5439 if (TREE_CODE (fn) == CALL_EXPR)
5440 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5441 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5444 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5445 Return NULL_RTX if a normal call should be emitted rather than transforming
5446 the function inline. If convenient, the result should be placed in
5447 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5448 call. */
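/* The transformations performed below amount to (illustration only, and
   only when the result of the call is unused):

     fprintf (fp, "%s", s)  ==>  fputs (s, fp)
     fprintf (fp, "%c", c)  ==>  fputc (c, fp)
     fprintf (fp, "abc")    ==>  fputs ("abc", fp)
     fprintf (fp, "")       ==>  (evaluate FP for side effects only)

   Any other format containing '%' is left as a normal call.  */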
5449 static rtx
5450 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5451 bool unlocked)
5453 /* If we're using an unlocked function, assume the other unlocked
5454 functions exist explicitly. */
5455 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5456 : implicit_built_in_decls[BUILT_IN_FPUTC];
5457 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5458 : implicit_built_in_decls[BUILT_IN_FPUTS];
5459 const char *fmt_str;
5460 tree fn = 0;
5461 tree fmt, fp, arg;
5462 int nargs = call_expr_nargs (exp);
5464 /* If the return value is used, don't do the transformation. */
5465 if (target != const0_rtx)
5466 return NULL_RTX;
5468 /* Verify the required arguments in the original call. */
5469 if (nargs < 2)
5470 return NULL_RTX;
5471 fp = CALL_EXPR_ARG (exp, 0);
5472 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5473 return NULL_RTX;
5474 fmt = CALL_EXPR_ARG (exp, 1);
5475 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5476 return NULL_RTX;
5478 /* Check whether the format is a literal string constant. */
5479 fmt_str = c_getstr (fmt);
5480 if (fmt_str == NULL)
5481 return NULL_RTX;
5483 if (!init_target_chars ())
5484 return NULL_RTX;
5486 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5487 if (strcmp (fmt_str, target_percent_s) == 0)
5489 if ((nargs != 3)
5490 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5491 return NULL_RTX;
5492 arg = CALL_EXPR_ARG (exp, 2);
5493 if (fn_fputs)
5494 fn = build_call_expr (fn_fputs, 2, arg, fp);
5496 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5497 else if (strcmp (fmt_str, target_percent_c) == 0)
5499 if ((nargs != 3)
5500 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5501 return NULL_RTX;
5502 arg = CALL_EXPR_ARG (exp, 2);
5503 if (fn_fputc)
5504 fn = build_call_expr (fn_fputc, 2, arg, fp);
5506 else
5508 /* We can't handle anything else with % args or %% ... yet. */
5509 if (strchr (fmt_str, target_percent))
5510 return NULL_RTX;
5512 if (nargs > 2)
5513 return NULL_RTX;
5515 /* If the format specifier was "", fprintf does nothing. */
5516 if (fmt_str[0] == '\0')
5518 /* Evaluate and ignore FILE* argument for side-effects. */
5519 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5520 return const0_rtx;
5523 /* When "string" doesn't contain %, replace all cases of
5524 fprintf(stream,string) with fputs(string,stream). The fputs
5525 builtin will take care of special cases like length == 1. */
5526 if (fn_fputs)
5527 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5530 if (!fn)
5531 return NULL_RTX;
5532 if (TREE_CODE (fn) == CALL_EXPR)
5533 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5534 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5537 /* Expand a call EXP to sprintf. Return NULL_RTX if
5538 a normal call should be emitted rather than expanding the function
5539 inline. If convenient, the result should be placed in TARGET with
5540 mode MODE. */
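/* The transformations performed below amount to (illustration only):

     sprintf (buf, "abc")    ==>  strcpy (buf, "abc"), result 3 if used
     sprintf (buf, "%s", s)  ==>  strcpy (buf, s), result strlen (s) if
                                  that length is a compile-time constant

   Any other format containing '%' is left as a normal call.  */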
5542 static rtx
5543 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5545 tree dest, fmt;
5546 const char *fmt_str;
5547 int nargs = call_expr_nargs (exp);
5549 /* Verify the required arguments in the original call. */
5550 if (nargs < 2)
5551 return NULL_RTX;
5552 dest = CALL_EXPR_ARG (exp, 0);
5553 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5554 return NULL_RTX;
5555 fmt = CALL_EXPR_ARG (exp, 1);
5556 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5557 return NULL_RTX;
5559 /* Check whether the format is a literal string constant. */
5560 fmt_str = c_getstr (fmt);
5561 if (fmt_str == NULL)
5562 return NULL_RTX;
5564 if (!init_target_chars ())
5565 return NULL_RTX;
5567 /* If the format doesn't contain % args or %%, use strcpy. */
5568 if (strchr (fmt_str, target_percent) == 0)
5570 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5571 tree exp;
5573 if ((nargs > 2) || ! fn)
5574 return NULL_RTX;
5575 expand_expr (build_call_expr (fn, 2, dest, fmt),
5576 const0_rtx, VOIDmode, EXPAND_NORMAL);
5577 if (target == const0_rtx)
5578 return const0_rtx;
5579 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5580 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5582 /* If the format is "%s", use strcpy if the result isn't used. */
5583 else if (strcmp (fmt_str, target_percent_s) == 0)
5585 tree fn, arg, len;
5586 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5588 if (! fn)
5589 return NULL_RTX;
5590 if (nargs != 3)
5591 return NULL_RTX;
5592 arg = CALL_EXPR_ARG (exp, 2);
5593 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5594 return NULL_RTX;
5596 if (target != const0_rtx)
5598 len = c_strlen (arg, 1);
5599 if (! len || TREE_CODE (len) != INTEGER_CST)
5600 return NULL_RTX;
5602 else
5603 len = NULL_TREE;
5605 expand_expr (build_call_expr (fn, 2, dest, arg),
5606 const0_rtx, VOIDmode, EXPAND_NORMAL);
5608 if (target == const0_rtx)
5609 return const0_rtx;
5610 return expand_expr (len, target, mode, EXPAND_NORMAL);
5613 return NULL_RTX;
5616 /* Expand a call to either the entry or exit function profiler. */
5618 static rtx
5619 expand_builtin_profile_func (bool exitp)
5621 rtx this_rtx, which;
5623 this_rtx = DECL_RTL (current_function_decl);
5624 gcc_assert (MEM_P (this_rtx));
5625 this_rtx = XEXP (this_rtx, 0);
5627 if (exitp)
5628 which = profile_function_exit_libfunc;
5629 else
5630 which = profile_function_entry_libfunc;
5632 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5633 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5634 0),
5635 Pmode);
5637 return const0_rtx;
5640 /* Expand a call to __builtin___clear_cache. */
5642 static rtx
5643 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5645 #ifndef HAVE_clear_cache
5646 #ifdef CLEAR_INSN_CACHE
5647 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5648 does something. Just do the default expansion to a call to
5649 __clear_cache(). */
5650 return NULL_RTX;
5651 #else
5652 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5653 does nothing. There is no need to call it. Do nothing. */
5654 return const0_rtx;
5655 #endif /* CLEAR_INSN_CACHE */
5656 #else
5657 /* We have a "clear_cache" insn, and it will handle everything. */
5658 tree begin, end;
5659 rtx begin_rtx, end_rtx;
5660 enum insn_code icode;
5662 /* We must not expand to a library call. If we did, any
5663 fallback library function in libgcc that might contain a call to
5664 __builtin___clear_cache() would recurse infinitely. */
5665 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5667 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5668 return const0_rtx;
5671 if (HAVE_clear_cache)
5673 icode = CODE_FOR_clear_cache;
5675 begin = CALL_EXPR_ARG (exp, 0);
5676 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5677 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5678 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5679 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5681 end = CALL_EXPR_ARG (exp, 1);
5682 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5683 end_rtx = convert_memory_address (Pmode, end_rtx);
5684 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5685 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5687 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5689 return const0_rtx;
5690 #endif /* HAVE_clear_cache */
5693 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
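/* A sketch of the rounding done below, in plain C (illustrative only;
   ALIGN stands for TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT):

     tramp = (tramp + ALIGN - 1) & -ALIGN;

   i.e. add ALIGN-1 and mask off the low bits, emitted here as a PLUS
   followed by an AND in Pmode.  */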
5695 static rtx
5696 round_trampoline_addr (rtx tramp)
5698 rtx temp, addend, mask;
5700 /* If we don't need too much alignment, we'll have been guaranteed
5701 proper alignment by get_trampoline_type. */
5702 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5703 return tramp;
5705 /* Round address up to desired boundary. */
5706 temp = gen_reg_rtx (Pmode);
5707 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5708 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5710 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5711 temp, 0, OPTAB_LIB_WIDEN);
5712 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5713 temp, 0, OPTAB_LIB_WIDEN);
5715 return tramp;
5718 static rtx
5719 expand_builtin_init_trampoline (tree exp)
5721 tree t_tramp, t_func, t_chain;
5722 rtx r_tramp, r_func, r_chain;
5723 #ifdef TRAMPOLINE_TEMPLATE
5724 rtx blktramp;
5725 #endif
5727 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5728 POINTER_TYPE, VOID_TYPE))
5729 return NULL_RTX;
5731 t_tramp = CALL_EXPR_ARG (exp, 0);
5732 t_func = CALL_EXPR_ARG (exp, 1);
5733 t_chain = CALL_EXPR_ARG (exp, 2);
5735 r_tramp = expand_normal (t_tramp);
5736 r_func = expand_normal (t_func);
5737 r_chain = expand_normal (t_chain);
5739 /* Generate insns to initialize the trampoline. */
5740 r_tramp = round_trampoline_addr (r_tramp);
5741 #ifdef TRAMPOLINE_TEMPLATE
5742 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5743 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5744 emit_block_move (blktramp, assemble_trampoline_template (),
5745 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5746 #endif
5747 trampolines_created = 1;
5748 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5750 return const0_rtx;
5753 static rtx
5754 expand_builtin_adjust_trampoline (tree exp)
5756 rtx tramp;
5758 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5759 return NULL_RTX;
5761 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5762 tramp = round_trampoline_addr (tramp);
5763 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5764 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5765 #endif
5767 return tramp;
5770 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5771 function. The function first checks whether the back end provides
5772 an insn to implement signbit for the respective mode. If not, it
5773 checks whether the floating point format of the value is such that
5774 the sign bit can be extracted. If that is not the case, the
5775 function returns NULL_RTX to indicate that a normal call should be
5776 emitted rather than expanding the function in-line. EXP is the
5777 expression that is a call to the builtin function; if convenient,
5778 the result should be placed in TARGET. */
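/* As an illustration (not generated code): for IEEE single precision,
   where the sign occupies bit 31, the fallback below boils down to

     signbit (x)  ==>  (bits_of (x) & 0x80000000)

   with bits_of denoting a bit-level reinterpretation of X as an integer
   (a hypothetical helper, shown only for exposition).  A mode wider than
   a word additionally needs a word select and possibly a right shift.  */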
5779 static rtx
5780 expand_builtin_signbit (tree exp, rtx target)
5782 const struct real_format *fmt;
5783 enum machine_mode fmode, imode, rmode;
5784 HOST_WIDE_INT hi, lo;
5785 tree arg;
5786 int word, bitpos;
5787 enum insn_code icode;
5788 rtx temp;
5790 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5791 return NULL_RTX;
5793 arg = CALL_EXPR_ARG (exp, 0);
5794 fmode = TYPE_MODE (TREE_TYPE (arg));
5795 rmode = TYPE_MODE (TREE_TYPE (exp));
5796 fmt = REAL_MODE_FORMAT (fmode);
5798 arg = builtin_save_expr (arg);
5800 /* Expand the argument yielding a RTX expression. */
5801 temp = expand_normal (arg);
5803 /* Check if the back end provides an insn that handles signbit for the
5804 argument's mode. */
5805 icode = signbit_optab->handlers [(int) fmode].insn_code;
5806 if (icode != CODE_FOR_nothing)
5808 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5809 emit_unop_insn (icode, target, temp, UNKNOWN);
5810 return target;
5813 /* For floating point formats without a sign bit, implement signbit
5814 as "ARG < 0.0". */
5815 bitpos = fmt->signbit_ro;
5816 if (bitpos < 0)
5818 /* But we can't do this if the format supports signed zero. */
5819 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5820 return NULL_RTX;
5822 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5823 build_real (TREE_TYPE (arg), dconst0));
5824 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5827 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5829 imode = int_mode_for_mode (fmode);
5830 if (imode == BLKmode)
5831 return NULL_RTX;
5832 temp = gen_lowpart (imode, temp);
5834 else
5836 imode = word_mode;
5837 /* Handle targets with different FP word orders. */
5838 if (FLOAT_WORDS_BIG_ENDIAN)
5839 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5840 else
5841 word = bitpos / BITS_PER_WORD;
5842 temp = operand_subword_force (temp, word, fmode);
5843 bitpos = bitpos % BITS_PER_WORD;
5846 /* Force the intermediate word_mode (or narrower) result into a
5847 register. This avoids attempting to create paradoxical SUBREGs
5848 of floating point modes below. */
5849 temp = force_reg (imode, temp);
5851 /* If the bitpos is within the "result mode" lowpart, the operation
5852 can be implemented with a single bitwise AND. Otherwise, we need
5853 a right shift and an AND. */
5855 if (bitpos < GET_MODE_BITSIZE (rmode))
5857 if (bitpos < HOST_BITS_PER_WIDE_INT)
5859 hi = 0;
5860 lo = (HOST_WIDE_INT) 1 << bitpos;
5862 else
5864 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5865 lo = 0;
5868 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5869 temp = gen_lowpart (rmode, temp);
5870 temp = expand_binop (rmode, and_optab, temp,
5871 immed_double_const (lo, hi, rmode),
5872 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5874 else
5876 /* Perform a logical right shift to place the signbit in the least
5877 significant bit, then truncate the result to the desired mode
5878 and mask just this bit. */
5879 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5880 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5881 temp = gen_lowpart (rmode, temp);
5882 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5883 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5886 return temp;
5889 /* Expand fork or exec calls. TARGET is the desired target of the
5890 call. EXP is the call. FN is the FUNCTION_DECL of the
5891 actual function being called. IGNORE is nonzero if the
5892 value is to be ignored. */
5894 static rtx
5895 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5897 tree id, decl;
5898 tree call;
5900 /* If we are not profiling, just call the function. */
5901 if (!profile_arc_flag)
5902 return NULL_RTX;
5904 /* Otherwise call the wrapper. This should be equivalent as far as the rest
5905 of the compiler is concerned, so the generated code does not diverge, and the
5906 wrapper can run whatever extra code is needed to keep the profiling data sane. */
5908 switch (DECL_FUNCTION_CODE (fn))
5910 case BUILT_IN_FORK:
5911 id = get_identifier ("__gcov_fork");
5912 break;
5914 case BUILT_IN_EXECL:
5915 id = get_identifier ("__gcov_execl");
5916 break;
5918 case BUILT_IN_EXECV:
5919 id = get_identifier ("__gcov_execv");
5920 break;
5922 case BUILT_IN_EXECLP:
5923 id = get_identifier ("__gcov_execlp");
5924 break;
5926 case BUILT_IN_EXECLE:
5927 id = get_identifier ("__gcov_execle");
5928 break;
5930 case BUILT_IN_EXECVP:
5931 id = get_identifier ("__gcov_execvp");
5932 break;
5934 case BUILT_IN_EXECVE:
5935 id = get_identifier ("__gcov_execve");
5936 break;
5938 default:
5939 gcc_unreachable ();
5942 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5943 DECL_EXTERNAL (decl) = 1;
5944 TREE_PUBLIC (decl) = 1;
5945 DECL_ARTIFICIAL (decl) = 1;
5946 TREE_NOTHROW (decl) = 1;
5947 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5948 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5949 call = rewrite_call_expr (exp, 0, decl, 0);
5950 return expand_call (call, target, ignore);
5955 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5956 the pointer in these functions is void*, the tree optimizers may remove
5957 casts. The mode computed in expand_builtin isn't reliable either, due
5958 to __sync_bool_compare_and_swap.
5960 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5961 group of builtins. This gives us log2 of the mode size. */
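/* For example, BUILT_IN_FETCH_AND_ADD_4 - BUILT_IN_FETCH_AND_ADD_1 is 2,
   so the computation below asks for an integer mode of BITS_PER_UNIT << 2
   bits -- 32 bits, i.e. SImode, on typical byte-addressed targets.  */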
5963 static inline enum machine_mode
5964 get_builtin_sync_mode (int fcode_diff)
5966 /* The size is not negotiable, so ask not to get BLKmode in return
5967 if the target indicates that a smaller size would be better. */
5968 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5971 /* Expand the memory expression LOC and return the appropriate memory operand
5972 for the builtin_sync operations. */
5974 static rtx
5975 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5977 rtx addr, mem;
5979 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5981 /* Note that we explicitly do not want any alias information for this
5982 memory, so that we kill all other live memories. Otherwise we don't
5983 satisfy the full barrier semantics of the intrinsic. */
5984 mem = validize_mem (gen_rtx_MEM (mode, addr));
5986 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5987 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5988 MEM_VOLATILE_P (mem) = 1;
5990 return mem;
5993 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5994 EXP is the CALL_EXPR. CODE is the rtx code
5995 that corresponds to the arithmetic or logical operation from the name;
5996 an exception here is that NOT actually means NAND. TARGET is an optional
5997 place for us to store the results; AFTER is true if this is the
5998 fetch_and_xxx form. IGNORE is true if we don't actually care about
5999 the result of the operation at all. */
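/* In pseudo-C (illustrative only), the whole family behaves as one atomic
   step:

     tmp = *ptr;  newval = tmp OP val;  *ptr = newval;
     return AFTER ? newval : tmp;

   where for the NAND variants "tmp OP val" means ~(tmp & val) under the
   GCC 4.4 semantics referred to in the warning below.  */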
6001 static rtx
6002 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6003 enum rtx_code code, bool after,
6004 rtx target, bool ignore)
6006 rtx val, mem;
6007 enum machine_mode old_mode;
6009 if (code == NOT && warn_sync_nand)
6011 tree fndecl = get_callee_fndecl (exp);
6012 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6014 static bool warned_f_a_n, warned_n_a_f;
6016 switch (fcode)
6018 case BUILT_IN_FETCH_AND_NAND_1:
6019 case BUILT_IN_FETCH_AND_NAND_2:
6020 case BUILT_IN_FETCH_AND_NAND_4:
6021 case BUILT_IN_FETCH_AND_NAND_8:
6022 case BUILT_IN_FETCH_AND_NAND_16:
6024 if (warned_f_a_n)
6025 break;
6027 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6028 inform (input_location,
6029 "%qD changed semantics in GCC 4.4", fndecl);
6030 warned_f_a_n = true;
6031 break;
6033 case BUILT_IN_NAND_AND_FETCH_1:
6034 case BUILT_IN_NAND_AND_FETCH_2:
6035 case BUILT_IN_NAND_AND_FETCH_4:
6036 case BUILT_IN_NAND_AND_FETCH_8:
6037 case BUILT_IN_NAND_AND_FETCH_16:
6039 if (warned_n_a_f)
6040 break;
6042 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6043 inform (input_location,
6044 "%qD changed semantics in GCC 4.4", fndecl);
6045 warned_n_a_f = true;
6046 break;
6048 default:
6049 gcc_unreachable ();
6053 /* Expand the operands. */
6054 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6056 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6057 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6058 of CONST_INTs, where we know the old_mode only from the call argument. */
6059 old_mode = GET_MODE (val);
6060 if (old_mode == VOIDmode)
6061 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6062 val = convert_modes (mode, old_mode, val, 1);
6064 if (ignore)
6065 return expand_sync_operation (mem, val, code);
6066 else
6067 return expand_sync_fetch_operation (mem, val, code, after, target);
6070 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6071 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6072 true if this is the boolean form. TARGET is a place for us to store the
6073 results; this is NOT optional if IS_BOOL is true. */
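/* Illustrative pseudo-C for the two forms (performed as one atomic step):

     oldval = *ptr;
     if (oldval == expected)
       *ptr = newval;
     return IS_BOOL ? oldval == expected : oldval;  */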
6075 static rtx
6076 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6077 bool is_bool, rtx target)
6079 rtx old_val, new_val, mem;
6080 enum machine_mode old_mode;
6082 /* Expand the operands. */
6083 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6086 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6087 mode, EXPAND_NORMAL);
6088 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6089 of CONST_INTs, where we know the old_mode only from the call argument. */
6090 old_mode = GET_MODE (old_val);
6091 if (old_mode == VOIDmode)
6092 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6093 old_val = convert_modes (mode, old_mode, old_val, 1);
6095 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6096 mode, EXPAND_NORMAL);
6097 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6098 of CONST_INTs, where we know the old_mode only from the call argument. */
6099 old_mode = GET_MODE (new_val);
6100 if (old_mode == VOIDmode)
6101 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6102 new_val = convert_modes (mode, old_mode, new_val, 1);
6104 if (is_bool)
6105 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6106 else
6107 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6110 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6111 general form is actually an atomic exchange, and some targets only
6112 support a reduced form with the second argument being a constant 1.
6113 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6114 the results. */
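/* Illustrative pseudo-C (one atomic step):

     tmp = *ptr;  *ptr = val;  return tmp;

   with VAL restricted to the constant 1 on targets that only implement
   the reduced form mentioned above.  */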
6116 static rtx
6117 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6118 rtx target)
6120 rtx val, mem;
6121 enum machine_mode old_mode;
6123 /* Expand the operands. */
6124 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6125 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6126 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6127 of CONST_INTs, where we know the old_mode only from the call argument. */
6128 old_mode = GET_MODE (val);
6129 if (old_mode == VOIDmode)
6130 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6131 val = convert_modes (mode, old_mode, val, 1);
6133 return expand_sync_lock_test_and_set (mem, val, target);
6136 /* Expand the __sync_synchronize intrinsic. */
6138 static void
6139 expand_builtin_synchronize (void)
6141 tree x;
6143 #ifdef HAVE_memory_barrier
6144 if (HAVE_memory_barrier)
6146 emit_insn (gen_memory_barrier ());
6147 return;
6149 #endif
6151 if (synchronize_libfunc != NULL_RTX)
6153 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6154 return;
6157 /* If no explicit memory barrier instruction is available, create an
6158 empty asm stmt with a memory clobber. */
6159 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6160 tree_cons (NULL, build_string (6, "memory"), NULL));
6161 ASM_VOLATILE_P (x) = 1;
6162 expand_asm_expr (x);
6165 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6167 static void
6168 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6170 enum insn_code icode;
6171 rtx mem, insn;
6172 rtx val = const0_rtx;
6174 /* Expand the operands. */
6175 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6177 /* If there is an explicit operation in the md file, use it. */
6178 icode = sync_lock_release[mode];
6179 if (icode != CODE_FOR_nothing)
6181 if (!insn_data[icode].operand[1].predicate (val, mode))
6182 val = force_reg (mode, val);
6184 insn = GEN_FCN (icode) (mem, val);
6185 if (insn)
6187 emit_insn (insn);
6188 return;
6192 /* Otherwise we can implement this operation by emitting a barrier
6193 followed by a store of zero. */
6194 expand_builtin_synchronize ();
6195 emit_move_insn (mem, val);
6198 /* Expand an expression EXP that calls a built-in function,
6199 with result going to TARGET if that's convenient
6200 (and in mode MODE if that's convenient).
6201 SUBTARGET may be used as the target for computing one of EXP's operands.
6202 IGNORE is nonzero if the value is to be ignored. */
6205 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6206 int ignore)
6208 tree fndecl = get_callee_fndecl (exp);
6209 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6210 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6212 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6213 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6215 /* When not optimizing, generate calls to library functions for a certain
6216 set of builtins. */
6217 if (!optimize
6218 && !called_as_built_in (fndecl)
6219 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6220 && fcode != BUILT_IN_ALLOCA
6221 && fcode != BUILT_IN_FREE)
6222 return expand_call (exp, target, ignore);
6224 /* The built-in function expanders test for target == const0_rtx
6225 to determine whether the function's result will be ignored. */
6226 if (ignore)
6227 target = const0_rtx;
6229 /* If the result of a pure or const built-in function is ignored, and
6230 none of its arguments are volatile, we can avoid expanding the
6231 built-in call and just evaluate the arguments for side-effects. */
6232 if (target == const0_rtx
6233 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6235 bool volatilep = false;
6236 tree arg;
6237 call_expr_arg_iterator iter;
6239 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6240 if (TREE_THIS_VOLATILE (arg))
6242 volatilep = true;
6243 break;
6246 if (! volatilep)
6248 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6249 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6250 return const0_rtx;
6254 switch (fcode)
6256 CASE_FLT_FN (BUILT_IN_FABS):
6257 target = expand_builtin_fabs (exp, target, subtarget);
6258 if (target)
6259 return target;
6260 break;
6262 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6263 target = expand_builtin_copysign (exp, target, subtarget);
6264 if (target)
6265 return target;
6266 break;
6268 /* Just do a normal library call if we were unable to fold
6269 the values. */
6270 CASE_FLT_FN (BUILT_IN_CABS):
6271 break;
6273 CASE_FLT_FN (BUILT_IN_EXP):
6274 CASE_FLT_FN (BUILT_IN_EXP10):
6275 CASE_FLT_FN (BUILT_IN_POW10):
6276 CASE_FLT_FN (BUILT_IN_EXP2):
6277 CASE_FLT_FN (BUILT_IN_EXPM1):
6278 CASE_FLT_FN (BUILT_IN_LOGB):
6279 CASE_FLT_FN (BUILT_IN_LOG):
6280 CASE_FLT_FN (BUILT_IN_LOG10):
6281 CASE_FLT_FN (BUILT_IN_LOG2):
6282 CASE_FLT_FN (BUILT_IN_LOG1P):
6283 CASE_FLT_FN (BUILT_IN_TAN):
6284 CASE_FLT_FN (BUILT_IN_ASIN):
6285 CASE_FLT_FN (BUILT_IN_ACOS):
6286 CASE_FLT_FN (BUILT_IN_ATAN):
6287 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6288 because of possible accuracy problems. */
6289 if (! flag_unsafe_math_optimizations)
6290 break;
6291 CASE_FLT_FN (BUILT_IN_SQRT):
6292 CASE_FLT_FN (BUILT_IN_FLOOR):
6293 CASE_FLT_FN (BUILT_IN_CEIL):
6294 CASE_FLT_FN (BUILT_IN_TRUNC):
6295 CASE_FLT_FN (BUILT_IN_ROUND):
6296 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6297 CASE_FLT_FN (BUILT_IN_RINT):
6298 target = expand_builtin_mathfn (exp, target, subtarget);
6299 if (target)
6300 return target;
6301 break;
6303 CASE_FLT_FN (BUILT_IN_ILOGB):
6304 if (! flag_unsafe_math_optimizations)
6305 break;
6306 CASE_FLT_FN (BUILT_IN_ISINF):
6307 CASE_FLT_FN (BUILT_IN_FINITE):
6308 case BUILT_IN_ISFINITE:
6309 case BUILT_IN_ISNORMAL:
6310 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6311 if (target)
6312 return target;
6313 break;
6315 CASE_FLT_FN (BUILT_IN_LCEIL):
6316 CASE_FLT_FN (BUILT_IN_LLCEIL):
6317 CASE_FLT_FN (BUILT_IN_LFLOOR):
6318 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6319 target = expand_builtin_int_roundingfn (exp, target);
6320 if (target)
6321 return target;
6322 break;
6324 CASE_FLT_FN (BUILT_IN_LRINT):
6325 CASE_FLT_FN (BUILT_IN_LLRINT):
6326 CASE_FLT_FN (BUILT_IN_LROUND):
6327 CASE_FLT_FN (BUILT_IN_LLROUND):
6328 target = expand_builtin_int_roundingfn_2 (exp, target);
6329 if (target)
6330 return target;
6331 break;
6333 CASE_FLT_FN (BUILT_IN_POW):
6334 target = expand_builtin_pow (exp, target, subtarget);
6335 if (target)
6336 return target;
6337 break;
6339 CASE_FLT_FN (BUILT_IN_POWI):
6340 target = expand_builtin_powi (exp, target, subtarget);
6341 if (target)
6342 return target;
6343 break;
6345 CASE_FLT_FN (BUILT_IN_ATAN2):
6346 CASE_FLT_FN (BUILT_IN_LDEXP):
6347 CASE_FLT_FN (BUILT_IN_SCALB):
6348 CASE_FLT_FN (BUILT_IN_SCALBN):
6349 CASE_FLT_FN (BUILT_IN_SCALBLN):
6350 if (! flag_unsafe_math_optimizations)
6351 break;
6353 CASE_FLT_FN (BUILT_IN_FMOD):
6354 CASE_FLT_FN (BUILT_IN_REMAINDER):
6355 CASE_FLT_FN (BUILT_IN_DREM):
6356 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6357 if (target)
6358 return target;
6359 break;
6361 CASE_FLT_FN (BUILT_IN_CEXPI):
6362 target = expand_builtin_cexpi (exp, target, subtarget);
6363 gcc_assert (target);
6364 return target;
6366 CASE_FLT_FN (BUILT_IN_SIN):
6367 CASE_FLT_FN (BUILT_IN_COS):
6368 if (! flag_unsafe_math_optimizations)
6369 break;
6370 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6371 if (target)
6372 return target;
6373 break;
6375 CASE_FLT_FN (BUILT_IN_SINCOS):
6376 if (! flag_unsafe_math_optimizations)
6377 break;
6378 target = expand_builtin_sincos (exp);
6379 if (target)
6380 return target;
6381 break;
6383 case BUILT_IN_APPLY_ARGS:
6384 return expand_builtin_apply_args ();
6386 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6387 FUNCTION with a copy of the parameters described by
6388 ARGUMENTS, and ARGSIZE. It returns a block of memory
6389 allocated on the stack into which is stored all the registers
6390 that might possibly be used for returning the result of a
6391 function. ARGUMENTS is the value returned by
6392 __builtin_apply_args. ARGSIZE is the number of bytes of
6393 arguments that must be copied. ??? How should this value be
6394 computed? We'll also need a safe worst case value for varargs
6395 functions. */
6396 case BUILT_IN_APPLY:
6397 if (!validate_arglist (exp, POINTER_TYPE,
6398 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6399 && !validate_arglist (exp, REFERENCE_TYPE,
6400 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6401 return const0_rtx;
6402 else
6404 rtx ops[3];
6406 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6407 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6408 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6410 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6413 /* __builtin_return (RESULT) causes the function to return the
6414 value described by RESULT. RESULT is address of the block of
6415 memory returned by __builtin_apply. */
6416 case BUILT_IN_RETURN:
6417 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6418 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6419 return const0_rtx;
6421 case BUILT_IN_SAVEREGS:
6422 return expand_builtin_saveregs ();
6424 case BUILT_IN_ARGS_INFO:
6425 return expand_builtin_args_info (exp);
6427 case BUILT_IN_VA_ARG_PACK:
6428 /* All valid uses of __builtin_va_arg_pack () are removed during
6429 inlining. */
6430 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6431 return const0_rtx;
6433 case BUILT_IN_VA_ARG_PACK_LEN:
6434 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6435 inlining. */
6436 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6437 return const0_rtx;
6439 /* Return the address of the first anonymous stack arg. */
6440 case BUILT_IN_NEXT_ARG:
6441 if (fold_builtin_next_arg (exp, false))
6442 return const0_rtx;
6443 return expand_builtin_next_arg ();
6445 case BUILT_IN_CLEAR_CACHE:
6446 target = expand_builtin___clear_cache (exp);
6447 if (target)
6448 return target;
6449 break;
6451 case BUILT_IN_CLASSIFY_TYPE:
6452 return expand_builtin_classify_type (exp);
6454 case BUILT_IN_CONSTANT_P:
6455 return const0_rtx;
6457 case BUILT_IN_FRAME_ADDRESS:
6458 case BUILT_IN_RETURN_ADDRESS:
6459 return expand_builtin_frame_address (fndecl, exp);
6461 /* Returns the address of the area where the structure is returned.
6462 0 otherwise. */
6463 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6464 if (call_expr_nargs (exp) != 0
6465 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6466 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6467 return const0_rtx;
6468 else
6469 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6471 case BUILT_IN_ALLOCA:
6472 target = expand_builtin_alloca (exp, target);
6473 if (target)
6474 return target;
6475 break;
6477 case BUILT_IN_STACK_SAVE:
6478 return expand_stack_save ();
6480 case BUILT_IN_STACK_RESTORE:
6481 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6482 return const0_rtx;
6484 case BUILT_IN_BSWAP32:
6485 case BUILT_IN_BSWAP64:
6486 target = expand_builtin_bswap (exp, target, subtarget);
6488 if (target)
6489 return target;
6490 break;
6492 CASE_INT_FN (BUILT_IN_FFS):
6493 case BUILT_IN_FFSIMAX:
6494 target = expand_builtin_unop (target_mode, exp, target,
6495 subtarget, ffs_optab);
6496 if (target)
6497 return target;
6498 break;
6500 CASE_INT_FN (BUILT_IN_CLZ):
6501 case BUILT_IN_CLZIMAX:
6502 target = expand_builtin_unop (target_mode, exp, target,
6503 subtarget, clz_optab);
6504 if (target)
6505 return target;
6506 break;
6508 CASE_INT_FN (BUILT_IN_CTZ):
6509 case BUILT_IN_CTZIMAX:
6510 target = expand_builtin_unop (target_mode, exp, target,
6511 subtarget, ctz_optab);
6512 if (target)
6513 return target;
6514 break;
6516 CASE_INT_FN (BUILT_IN_POPCOUNT):
6517 case BUILT_IN_POPCOUNTIMAX:
6518 target = expand_builtin_unop (target_mode, exp, target,
6519 subtarget, popcount_optab);
6520 if (target)
6521 return target;
6522 break;
6524 CASE_INT_FN (BUILT_IN_PARITY):
6525 case BUILT_IN_PARITYIMAX:
6526 target = expand_builtin_unop (target_mode, exp, target,
6527 subtarget, parity_optab);
6528 if (target)
6529 return target;
6530 break;
6532 case BUILT_IN_STRLEN:
6533 target = expand_builtin_strlen (exp, target, target_mode);
6534 if (target)
6535 return target;
6536 break;
6538 case BUILT_IN_STRCPY:
6539 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6540 if (target)
6541 return target;
6542 break;
6544 case BUILT_IN_STRNCPY:
6545 target = expand_builtin_strncpy (exp, target, mode);
6546 if (target)
6547 return target;
6548 break;
6550 case BUILT_IN_STPCPY:
6551 target = expand_builtin_stpcpy (exp, target, mode);
6552 if (target)
6553 return target;
6554 break;
6556 case BUILT_IN_STRCAT:
6557 target = expand_builtin_strcat (fndecl, exp, target, mode);
6558 if (target)
6559 return target;
6560 break;
6562 case BUILT_IN_STRNCAT:
6563 target = expand_builtin_strncat (exp, target, mode);
6564 if (target)
6565 return target;
6566 break;
6568 case BUILT_IN_STRSPN:
6569 target = expand_builtin_strspn (exp, target, mode);
6570 if (target)
6571 return target;
6572 break;
6574 case BUILT_IN_STRCSPN:
6575 target = expand_builtin_strcspn (exp, target, mode);
6576 if (target)
6577 return target;
6578 break;
6580 case BUILT_IN_STRSTR:
6581 target = expand_builtin_strstr (exp, target, mode);
6582 if (target)
6583 return target;
6584 break;
6586 case BUILT_IN_STRPBRK:
6587 target = expand_builtin_strpbrk (exp, target, mode);
6588 if (target)
6589 return target;
6590 break;
6592 case BUILT_IN_INDEX:
6593 case BUILT_IN_STRCHR:
6594 target = expand_builtin_strchr (exp, target, mode);
6595 if (target)
6596 return target;
6597 break;
6599 case BUILT_IN_RINDEX:
6600 case BUILT_IN_STRRCHR:
6601 target = expand_builtin_strrchr (exp, target, mode);
6602 if (target)
6603 return target;
6604 break;
6606 case BUILT_IN_MEMCPY:
6607 target = expand_builtin_memcpy (exp, target, mode);
6608 if (target)
6609 return target;
6610 break;
6612 case BUILT_IN_MEMPCPY:
6613 target = expand_builtin_mempcpy (exp, target, mode);
6614 if (target)
6615 return target;
6616 break;
6618 case BUILT_IN_MEMMOVE:
6619 target = expand_builtin_memmove (exp, target, mode, ignore);
6620 if (target)
6621 return target;
6622 break;
6624 case BUILT_IN_BCOPY:
6625 target = expand_builtin_bcopy (exp, ignore);
6626 if (target)
6627 return target;
6628 break;
6630 case BUILT_IN_MEMSET:
6631 target = expand_builtin_memset (exp, target, mode);
6632 if (target)
6633 return target;
6634 break;
6636 case BUILT_IN_BZERO:
6637 target = expand_builtin_bzero (exp);
6638 if (target)
6639 return target;
6640 break;
6642 case BUILT_IN_STRCMP:
6643 target = expand_builtin_strcmp (exp, target, mode);
6644 if (target)
6645 return target;
6646 break;
6648 case BUILT_IN_STRNCMP:
6649 target = expand_builtin_strncmp (exp, target, mode);
6650 if (target)
6651 return target;
6652 break;
6654 case BUILT_IN_MEMCHR:
6655 target = expand_builtin_memchr (exp, target, mode);
6656 if (target)
6657 return target;
6658 break;
6660 case BUILT_IN_BCMP:
6661 case BUILT_IN_MEMCMP:
6662 target = expand_builtin_memcmp (exp, target, mode);
6663 if (target)
6664 return target;
6665 break;
6667 case BUILT_IN_SETJMP:
6668 /* This should have been lowered to the builtins below. */
6669 gcc_unreachable ();
6671 case BUILT_IN_SETJMP_SETUP:
6672 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6673 and the receiver label. */
6674 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6676 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6677 VOIDmode, EXPAND_NORMAL);
6678 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6679 rtx label_r = label_rtx (label);
6681 /* This is copied from the handling of non-local gotos. */
6682 expand_builtin_setjmp_setup (buf_addr, label_r);
6683 nonlocal_goto_handler_labels
6684 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6685 nonlocal_goto_handler_labels);
6686 /* ??? Do not let expand_label treat us as such since we would
6687 not want to be both on the list of non-local labels and on
6688 the list of forced labels. */
6689 FORCED_LABEL (label) = 0;
6690 return const0_rtx;
6692 break;
6694 case BUILT_IN_SETJMP_DISPATCHER:
6695 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6696 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6698 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6699 rtx label_r = label_rtx (label);
6701 /* Remove the dispatcher label from the list of non-local labels
6702 since the receiver labels have been added to it above. */
6703 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6704 return const0_rtx;
6706 break;
6708 case BUILT_IN_SETJMP_RECEIVER:
6709 /* __builtin_setjmp_receiver is passed the receiver label. */
6710 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6712 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6713 rtx label_r = label_rtx (label);
6715 expand_builtin_setjmp_receiver (label_r);
6716 return const0_rtx;
6718 break;
6720 /* __builtin_longjmp is passed a pointer to an array of five words.
6721 It's similar to the C library longjmp function but works with
6722 __builtin_setjmp above. */
6723 case BUILT_IN_LONGJMP:
6724 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6726 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6727 VOIDmode, EXPAND_NORMAL);
6728 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6730 if (value != const1_rtx)
6732 error ("%<__builtin_longjmp%> second argument must be 1");
6733 return const0_rtx;
6736 expand_builtin_longjmp (buf_addr, value);
6737 return const0_rtx;
6739 break;
6741 case BUILT_IN_NONLOCAL_GOTO:
6742 target = expand_builtin_nonlocal_goto (exp);
6743 if (target)
6744 return target;
6745 break;
6747 /* This updates the setjmp buffer that is its argument with the value
6748 of the current stack pointer. */
6749 case BUILT_IN_UPDATE_SETJMP_BUF:
6750 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6752 rtx buf_addr
6753 = expand_normal (CALL_EXPR_ARG (exp, 0));
6755 expand_builtin_update_setjmp_buf (buf_addr);
6756 return const0_rtx;
6758 break;
6760 case BUILT_IN_TRAP:
6761 expand_builtin_trap ();
6762 return const0_rtx;
6764 case BUILT_IN_PRINTF:
6765 target = expand_builtin_printf (exp, target, mode, false);
6766 if (target)
6767 return target;
6768 break;
6770 case BUILT_IN_PRINTF_UNLOCKED:
6771 target = expand_builtin_printf (exp, target, mode, true);
6772 if (target)
6773 return target;
6774 break;
6776 case BUILT_IN_FPUTS:
6777 target = expand_builtin_fputs (exp, target, false);
6778 if (target)
6779 return target;
6780 break;
6781 case BUILT_IN_FPUTS_UNLOCKED:
6782 target = expand_builtin_fputs (exp, target, true);
6783 if (target)
6784 return target;
6785 break;
6787 case BUILT_IN_FPRINTF:
6788 target = expand_builtin_fprintf (exp, target, mode, false);
6789 if (target)
6790 return target;
6791 break;
6793 case BUILT_IN_FPRINTF_UNLOCKED:
6794 target = expand_builtin_fprintf (exp, target, mode, true);
6795 if (target)
6796 return target;
6797 break;
6799 case BUILT_IN_SPRINTF:
6800 target = expand_builtin_sprintf (exp, target, mode);
6801 if (target)
6802 return target;
6803 break;
6805 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6806 case BUILT_IN_SIGNBITD32:
6807 case BUILT_IN_SIGNBITD64:
6808 case BUILT_IN_SIGNBITD128:
6809 target = expand_builtin_signbit (exp, target);
6810 if (target)
6811 return target;
6812 break;
6814 /* Various hooks for the DWARF 2 __throw routine. */
6815 case BUILT_IN_UNWIND_INIT:
6816 expand_builtin_unwind_init ();
6817 return const0_rtx;
6818 case BUILT_IN_DWARF_CFA:
6819 return virtual_cfa_rtx;
6820 #ifdef DWARF2_UNWIND_INFO
6821 case BUILT_IN_DWARF_SP_COLUMN:
6822 return expand_builtin_dwarf_sp_column ();
6823 case BUILT_IN_INIT_DWARF_REG_SIZES:
6824 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6825 return const0_rtx;
6826 #endif
6827 case BUILT_IN_FROB_RETURN_ADDR:
6828 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6829 case BUILT_IN_EXTRACT_RETURN_ADDR:
6830 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6831 case BUILT_IN_EH_RETURN:
6832 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6833 CALL_EXPR_ARG (exp, 1));
6834 return const0_rtx;
6835 #ifdef EH_RETURN_DATA_REGNO
6836 case BUILT_IN_EH_RETURN_DATA_REGNO:
6837 return expand_builtin_eh_return_data_regno (exp);
6838 #endif
6839 case BUILT_IN_EXTEND_POINTER:
6840 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6842 case BUILT_IN_VA_START:
6843 return expand_builtin_va_start (exp);
6844 case BUILT_IN_VA_END:
6845 return expand_builtin_va_end (exp);
6846 case BUILT_IN_VA_COPY:
6847 return expand_builtin_va_copy (exp);
6848 case BUILT_IN_EXPECT:
6849 return expand_builtin_expect (exp, target);
6850 case BUILT_IN_PREFETCH:
6851 expand_builtin_prefetch (exp);
6852 return const0_rtx;
6854 case BUILT_IN_PROFILE_FUNC_ENTER:
6855 return expand_builtin_profile_func (false);
6856 case BUILT_IN_PROFILE_FUNC_EXIT:
6857 return expand_builtin_profile_func (true);
6859 case BUILT_IN_INIT_TRAMPOLINE:
6860 return expand_builtin_init_trampoline (exp);
6861 case BUILT_IN_ADJUST_TRAMPOLINE:
6862 return expand_builtin_adjust_trampoline (exp);
6864 case BUILT_IN_FORK:
6865 case BUILT_IN_EXECL:
6866 case BUILT_IN_EXECV:
6867 case BUILT_IN_EXECLP:
6868 case BUILT_IN_EXECLE:
6869 case BUILT_IN_EXECVP:
6870 case BUILT_IN_EXECVE:
6871 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6872 if (target)
6873 return target;
6874 break;
6876 case BUILT_IN_FETCH_AND_ADD_1:
6877 case BUILT_IN_FETCH_AND_ADD_2:
6878 case BUILT_IN_FETCH_AND_ADD_4:
6879 case BUILT_IN_FETCH_AND_ADD_8:
6880 case BUILT_IN_FETCH_AND_ADD_16:
6881 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6882 target = expand_builtin_sync_operation (mode, exp, PLUS,
6883 false, target, ignore);
6884 if (target)
6885 return target;
6886 break;
6888 case BUILT_IN_FETCH_AND_SUB_1:
6889 case BUILT_IN_FETCH_AND_SUB_2:
6890 case BUILT_IN_FETCH_AND_SUB_4:
6891 case BUILT_IN_FETCH_AND_SUB_8:
6892 case BUILT_IN_FETCH_AND_SUB_16:
6893 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6894 target = expand_builtin_sync_operation (mode, exp, MINUS,
6895 false, target, ignore);
6896 if (target)
6897 return target;
6898 break;
6900 case BUILT_IN_FETCH_AND_OR_1:
6901 case BUILT_IN_FETCH_AND_OR_2:
6902 case BUILT_IN_FETCH_AND_OR_4:
6903 case BUILT_IN_FETCH_AND_OR_8:
6904 case BUILT_IN_FETCH_AND_OR_16:
6905 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6906 target = expand_builtin_sync_operation (mode, exp, IOR,
6907 false, target, ignore);
6908 if (target)
6909 return target;
6910 break;
6912 case BUILT_IN_FETCH_AND_AND_1:
6913 case BUILT_IN_FETCH_AND_AND_2:
6914 case BUILT_IN_FETCH_AND_AND_4:
6915 case BUILT_IN_FETCH_AND_AND_8:
6916 case BUILT_IN_FETCH_AND_AND_16:
6917 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6918 target = expand_builtin_sync_operation (mode, exp, AND,
6919 false, target, ignore);
6920 if (target)
6921 return target;
6922 break;
6924 case BUILT_IN_FETCH_AND_XOR_1:
6925 case BUILT_IN_FETCH_AND_XOR_2:
6926 case BUILT_IN_FETCH_AND_XOR_4:
6927 case BUILT_IN_FETCH_AND_XOR_8:
6928 case BUILT_IN_FETCH_AND_XOR_16:
6929 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6930 target = expand_builtin_sync_operation (mode, exp, XOR,
6931 false, target, ignore);
6932 if (target)
6933 return target;
6934 break;
6936 case BUILT_IN_FETCH_AND_NAND_1:
6937 case BUILT_IN_FETCH_AND_NAND_2:
6938 case BUILT_IN_FETCH_AND_NAND_4:
6939 case BUILT_IN_FETCH_AND_NAND_8:
6940 case BUILT_IN_FETCH_AND_NAND_16:
6941 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6942 target = expand_builtin_sync_operation (mode, exp, NOT,
6943 false, target, ignore);
6944 if (target)
6945 return target;
6946 break;
6948 case BUILT_IN_ADD_AND_FETCH_1:
6949 case BUILT_IN_ADD_AND_FETCH_2:
6950 case BUILT_IN_ADD_AND_FETCH_4:
6951 case BUILT_IN_ADD_AND_FETCH_8:
6952 case BUILT_IN_ADD_AND_FETCH_16:
6953 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6954 target = expand_builtin_sync_operation (mode, exp, PLUS,
6955 true, target, ignore);
6956 if (target)
6957 return target;
6958 break;
6960 case BUILT_IN_SUB_AND_FETCH_1:
6961 case BUILT_IN_SUB_AND_FETCH_2:
6962 case BUILT_IN_SUB_AND_FETCH_4:
6963 case BUILT_IN_SUB_AND_FETCH_8:
6964 case BUILT_IN_SUB_AND_FETCH_16:
6965 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6966 target = expand_builtin_sync_operation (mode, exp, MINUS,
6967 true, target, ignore);
6968 if (target)
6969 return target;
6970 break;
6972 case BUILT_IN_OR_AND_FETCH_1:
6973 case BUILT_IN_OR_AND_FETCH_2:
6974 case BUILT_IN_OR_AND_FETCH_4:
6975 case BUILT_IN_OR_AND_FETCH_8:
6976 case BUILT_IN_OR_AND_FETCH_16:
6977 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6978 target = expand_builtin_sync_operation (mode, exp, IOR,
6979 true, target, ignore);
6980 if (target)
6981 return target;
6982 break;
6984 case BUILT_IN_AND_AND_FETCH_1:
6985 case BUILT_IN_AND_AND_FETCH_2:
6986 case BUILT_IN_AND_AND_FETCH_4:
6987 case BUILT_IN_AND_AND_FETCH_8:
6988 case BUILT_IN_AND_AND_FETCH_16:
6989 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6990 target = expand_builtin_sync_operation (mode, exp, AND,
6991 true, target, ignore);
6992 if (target)
6993 return target;
6994 break;
6996 case BUILT_IN_XOR_AND_FETCH_1:
6997 case BUILT_IN_XOR_AND_FETCH_2:
6998 case BUILT_IN_XOR_AND_FETCH_4:
6999 case BUILT_IN_XOR_AND_FETCH_8:
7000 case BUILT_IN_XOR_AND_FETCH_16:
7001 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7002 target = expand_builtin_sync_operation (mode, exp, XOR,
7003 true, target, ignore);
7004 if (target)
7005 return target;
7006 break;
7008 case BUILT_IN_NAND_AND_FETCH_1:
7009 case BUILT_IN_NAND_AND_FETCH_2:
7010 case BUILT_IN_NAND_AND_FETCH_4:
7011 case BUILT_IN_NAND_AND_FETCH_8:
7012 case BUILT_IN_NAND_AND_FETCH_16:
7013 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7014 target = expand_builtin_sync_operation (mode, exp, NOT,
7015 true, target, ignore);
7016 if (target)
7017 return target;
7018 break;
7020 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7021 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7022 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7023 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7024 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7025 if (mode == VOIDmode)
7026 mode = TYPE_MODE (boolean_type_node);
7027 if (!target || !register_operand (target, mode))
7028 target = gen_reg_rtx (mode);
7030 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7031 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7032 if (target)
7033 return target;
7034 break;
7036 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7037 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7038 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7039 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7040 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7041 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7042 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7043 if (target)
7044 return target;
7045 break;
7047 case BUILT_IN_LOCK_TEST_AND_SET_1:
7048 case BUILT_IN_LOCK_TEST_AND_SET_2:
7049 case BUILT_IN_LOCK_TEST_AND_SET_4:
7050 case BUILT_IN_LOCK_TEST_AND_SET_8:
7051 case BUILT_IN_LOCK_TEST_AND_SET_16:
7052 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7053 target = expand_builtin_lock_test_and_set (mode, exp, target);
7054 if (target)
7055 return target;
7056 break;
7058 case BUILT_IN_LOCK_RELEASE_1:
7059 case BUILT_IN_LOCK_RELEASE_2:
7060 case BUILT_IN_LOCK_RELEASE_4:
7061 case BUILT_IN_LOCK_RELEASE_8:
7062 case BUILT_IN_LOCK_RELEASE_16:
7063 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7064 expand_builtin_lock_release (mode, exp);
7065 return const0_rtx;
7067 case BUILT_IN_SYNCHRONIZE:
7068 expand_builtin_synchronize ();
7069 return const0_rtx;
7071 case BUILT_IN_OBJECT_SIZE:
7072 return expand_builtin_object_size (exp);
7074 case BUILT_IN_MEMCPY_CHK:
7075 case BUILT_IN_MEMPCPY_CHK:
7076 case BUILT_IN_MEMMOVE_CHK:
7077 case BUILT_IN_MEMSET_CHK:
7078 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7079 if (target)
7080 return target;
7081 break;
7083 case BUILT_IN_STRCPY_CHK:
7084 case BUILT_IN_STPCPY_CHK:
7085 case BUILT_IN_STRNCPY_CHK:
7086 case BUILT_IN_STRCAT_CHK:
7087 case BUILT_IN_STRNCAT_CHK:
7088 case BUILT_IN_SNPRINTF_CHK:
7089 case BUILT_IN_VSNPRINTF_CHK:
7090 maybe_emit_chk_warning (exp, fcode);
7091 break;
7093 case BUILT_IN_SPRINTF_CHK:
7094 case BUILT_IN_VSPRINTF_CHK:
7095 maybe_emit_sprintf_chk_warning (exp, fcode);
7096 break;
7098 case BUILT_IN_FREE:
7099 maybe_emit_free_warning (exp);
7100 break;
7102 default: /* Just do a normal library call for an unknown builtin. */
7103 break;
7106 /* The switch statement above can drop through to cause the function
7107 to be called normally. */
7108 return expand_call (exp, target, ignore);
7111 /* Determine whether a tree node represents a call to a built-in
7112 function. If the tree T is a call to a built-in function with
7113 the right number of arguments of the appropriate types, return
7114 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7115 Otherwise the return value is END_BUILTINS. */
7117 enum built_in_function
7118 builtin_mathfn_code (const_tree t)
7120 const_tree fndecl, arg, parmlist;
7121 const_tree argtype, parmtype;
7122 const_call_expr_arg_iterator iter;
7124 if (TREE_CODE (t) != CALL_EXPR
7125 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7126 return END_BUILTINS;
7128 fndecl = get_callee_fndecl (t);
7129 if (fndecl == NULL_TREE
7130 || TREE_CODE (fndecl) != FUNCTION_DECL
7131 || ! DECL_BUILT_IN (fndecl)
7132 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7133 return END_BUILTINS;
7135 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7136 init_const_call_expr_arg_iterator (t, &iter);
7137 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7139 /* If a function doesn't take a variable number of arguments,
7140 the last element in the list will have type `void'. */
7141 parmtype = TREE_VALUE (parmlist);
7142 if (VOID_TYPE_P (parmtype))
7144 if (more_const_call_expr_args_p (&iter))
7145 return END_BUILTINS;
7146 return DECL_FUNCTION_CODE (fndecl);
7149 if (! more_const_call_expr_args_p (&iter))
7150 return END_BUILTINS;
7152 arg = next_const_call_expr_arg (&iter);
7153 argtype = TREE_TYPE (arg);
7155 if (SCALAR_FLOAT_TYPE_P (parmtype))
7157 if (! SCALAR_FLOAT_TYPE_P (argtype))
7158 return END_BUILTINS;
7160 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7162 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7163 return END_BUILTINS;
7165 else if (POINTER_TYPE_P (parmtype))
7167 if (! POINTER_TYPE_P (argtype))
7168 return END_BUILTINS;
7170 else if (INTEGRAL_TYPE_P (parmtype))
7172 if (! INTEGRAL_TYPE_P (argtype))
7173 return END_BUILTINS;
7175 else
7176 return END_BUILTINS;
7179 /* Variable-length argument list. */
7180 return DECL_FUNCTION_CODE (fndecl);
7183 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7184 evaluate to a constant. */
7186 static tree
7187 fold_builtin_constant_p (tree arg)
7189 /* We return 1 for a numeric type that's known to be a constant
7190 value at compile-time or for an aggregate type that's a
7191 literal constant. */
7192 STRIP_NOPS (arg);
7194   /* If we know this is a constant, return the constant one.  */
7195 if (CONSTANT_CLASS_P (arg)
7196 || (TREE_CODE (arg) == CONSTRUCTOR
7197 && TREE_CONSTANT (arg)))
7198 return integer_one_node;
7199 if (TREE_CODE (arg) == ADDR_EXPR)
7201 tree op = TREE_OPERAND (arg, 0);
7202 if (TREE_CODE (op) == STRING_CST
7203 || (TREE_CODE (op) == ARRAY_REF
7204 && integer_zerop (TREE_OPERAND (op, 1))
7205 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7206 return integer_one_node;
7209 /* If this expression has side effects, show we don't know it to be a
7210      constant.  Likewise if it's a pointer or aggregate type since in
7211      those cases we only want literals, as those are only optimized
7212      when generating RTL, not later.
7213 And finally, if we are compiling an initializer, not code, we
7214 need to return a definite result now; there's not going to be any
7215 more optimization done. */
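   /* For illustration: __builtin_constant_p (3) and
      __builtin_constant_p ("abc") fold to 1 above, while an argument
      with side effects, or of pointer or aggregate type, or one seen
      while folding an initializer, folds to 0 here.  */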
7216 if (TREE_SIDE_EFFECTS (arg)
7217 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7218 || POINTER_TYPE_P (TREE_TYPE (arg))
7219 || cfun == 0
7220 || folding_initializer)
7221 return integer_zero_node;
7223 return NULL_TREE;
7226 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7227 return it as a truthvalue. */
7229 static tree
7230 build_builtin_expect_predicate (tree pred, tree expected)
7232 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7234 fn = built_in_decls[BUILT_IN_EXPECT];
7235 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7236 ret_type = TREE_TYPE (TREE_TYPE (fn));
7237 pred_type = TREE_VALUE (arg_types);
7238 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7240 pred = fold_convert (pred_type, pred);
7241 expected = fold_convert (expected_type, expected);
7242 call_expr = build_call_expr (fn, 2, pred, expected);
7244 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7245 build_int_cst (ret_type, 0));
7248 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7249 NULL_TREE if no simplification is possible. */
7251 static tree
7252 fold_builtin_expect (tree arg0, tree arg1)
7254 tree inner, fndecl;
7255 enum tree_code code;
7257 /* If this is a builtin_expect within a builtin_expect keep the
7258 inner one. See through a comparison against a constant. It
7259      might have been added to create a truthvalue.  */
7260 inner = arg0;
7261 if (COMPARISON_CLASS_P (inner)
7262 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7263 inner = TREE_OPERAND (inner, 0);
7265 if (TREE_CODE (inner) == CALL_EXPR
7266 && (fndecl = get_callee_fndecl (inner))
7267 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7268 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7269 return arg0;
7271 /* Distribute the expected value over short-circuiting operators.
7272 See through the cast from truthvalue_type_node to long. */
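   /* Illustrative example: __builtin_expect (a && b, 1) is rewritten
      below as (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0),
      so the expectation reaches each short-circuited operand.  */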
7273 inner = arg0;
7274 while (TREE_CODE (inner) == NOP_EXPR
7275 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7276 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7277 inner = TREE_OPERAND (inner, 0);
7279 code = TREE_CODE (inner);
7280 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7282 tree op0 = TREE_OPERAND (inner, 0);
7283 tree op1 = TREE_OPERAND (inner, 1);
7285 op0 = build_builtin_expect_predicate (op0, arg1);
7286 op1 = build_builtin_expect_predicate (op1, arg1);
7287 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7289 return fold_convert (TREE_TYPE (arg0), inner);
7292 /* If the argument isn't invariant then there's nothing else we can do. */
7293 if (!TREE_CONSTANT (arg0))
7294 return NULL_TREE;
7296 /* If we expect that a comparison against the argument will fold to
7297 a constant return the constant. In practice, this means a true
7298 constant or the address of a non-weak symbol. */
7299 inner = arg0;
7300 STRIP_NOPS (inner);
7301 if (TREE_CODE (inner) == ADDR_EXPR)
7305 inner = TREE_OPERAND (inner, 0);
7307 while (TREE_CODE (inner) == COMPONENT_REF
7308 || TREE_CODE (inner) == ARRAY_REF);
7309 if (DECL_P (inner) && DECL_WEAK (inner))
7310 return NULL_TREE;
7313 /* Otherwise, ARG0 already has the proper type for the return value. */
7314 return arg0;
7317 /* Fold a call to __builtin_classify_type with argument ARG. */
7319 static tree
7320 fold_builtin_classify_type (tree arg)
7322 if (arg == 0)
7323 return build_int_cst (NULL_TREE, no_type_class);
7325 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7328 /* Fold a call to __builtin_strlen with argument ARG. */
7330 static tree
7331 fold_builtin_strlen (tree arg)
7333 if (!validate_arg (arg, POINTER_TYPE))
7334 return NULL_TREE;
7335 else
7337 tree len = c_strlen (arg, 0);
7339 if (len)
7341 /* Convert from the internal "sizetype" type to "size_t". */
7342 if (size_type_node)
7343 len = fold_convert (size_type_node, len);
7344 return len;
7347 return NULL_TREE;
7351 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7353 static tree
7354 fold_builtin_inf (tree type, int warn)
7356 REAL_VALUE_TYPE real;
7358 /* __builtin_inff is intended to be usable to define INFINITY on all
7359 targets. If an infinity is not available, INFINITY expands "to a
7360 positive constant of type float that overflows at translation
7361 time", footnote "In this case, using INFINITY will violate the
7362 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7363 Thus we pedwarn to ensure this constraint violation is
7364 diagnosed. */
7365 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7366 pedwarn (input_location, 0, "target format does not support infinity");
7368 real_inf (&real);
7369 return build_real (type, real);
7372 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7374 static tree
7375 fold_builtin_nan (tree arg, tree type, int quiet)
7377 REAL_VALUE_TYPE real;
7378 const char *str;
7380 if (!validate_arg (arg, POINTER_TYPE))
7381 return NULL_TREE;
7382 str = c_getstr (arg);
7383 if (!str)
7384 return NULL_TREE;
7386 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7387 return NULL_TREE;
7389 return build_real (type, real);
7392 /* Return true if the floating point expression T has an integer value.
7393 We also allow +Inf, -Inf and NaN to be considered integer values. */
7395 static bool
7396 integer_valued_real_p (tree t)
7398 switch (TREE_CODE (t))
7400 case FLOAT_EXPR:
7401 return true;
7403 case ABS_EXPR:
7404 case SAVE_EXPR:
7405 return integer_valued_real_p (TREE_OPERAND (t, 0));
7407 case COMPOUND_EXPR:
7408 case MODIFY_EXPR:
7409 case BIND_EXPR:
7410 return integer_valued_real_p (TREE_OPERAND (t, 1));
7412 case PLUS_EXPR:
7413 case MINUS_EXPR:
7414 case MULT_EXPR:
7415 case MIN_EXPR:
7416 case MAX_EXPR:
7417 return integer_valued_real_p (TREE_OPERAND (t, 0))
7418 && integer_valued_real_p (TREE_OPERAND (t, 1));
7420 case COND_EXPR:
7421 return integer_valued_real_p (TREE_OPERAND (t, 1))
7422 && integer_valued_real_p (TREE_OPERAND (t, 2));
7424 case REAL_CST:
7425 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7427 case NOP_EXPR:
7429 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7430 if (TREE_CODE (type) == INTEGER_TYPE)
7431 return true;
7432 if (TREE_CODE (type) == REAL_TYPE)
7433 return integer_valued_real_p (TREE_OPERAND (t, 0));
7434 break;
7437 case CALL_EXPR:
7438 switch (builtin_mathfn_code (t))
7440 CASE_FLT_FN (BUILT_IN_CEIL):
7441 CASE_FLT_FN (BUILT_IN_FLOOR):
7442 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7443 CASE_FLT_FN (BUILT_IN_RINT):
7444 CASE_FLT_FN (BUILT_IN_ROUND):
7445 CASE_FLT_FN (BUILT_IN_TRUNC):
7446 return true;
7448 CASE_FLT_FN (BUILT_IN_FMIN):
7449 CASE_FLT_FN (BUILT_IN_FMAX):
7450 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7451 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7453 default:
7454 break;
7456 break;
7458 default:
7459 break;
7461 return false;
7464 /* FNDECL is assumed to be a builtin where truncation can be propagated
7465    across (for instance floor((double)f) == (double)floorf (f)).
7466 Do the transformation for a call with argument ARG. */
7468 static tree
7469 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7471 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7473 if (!validate_arg (arg, REAL_TYPE))
7474 return NULL_TREE;
7476 /* Integer rounding functions are idempotent. */
7477 if (fcode == builtin_mathfn_code (arg))
7478 return arg;
7480 /* If argument is already integer valued, and we don't need to worry
7481 about setting errno, there's no need to perform rounding. */
7482 if (! flag_errno_math && integer_valued_real_p (arg))
7483 return arg;
7485 if (optimize)
7487 tree arg0 = strip_float_extensions (arg);
7488 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7489 tree newtype = TREE_TYPE (arg0);
7490 tree decl;
7492 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7493 && (decl = mathfn_built_in (newtype, fcode)))
7494 return fold_convert (ftype,
7495 build_call_expr (decl, 1,
7496 fold_convert (newtype, arg0)));
7498 return NULL_TREE;
7501 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7502 the argument, for instance lround((double)f) -> lroundf (f).
7503 Do the transformation for a call with argument ARG. */
7505 static tree
7506 fold_fixed_mathfn (tree fndecl, tree arg)
7508 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7510 if (!validate_arg (arg, REAL_TYPE))
7511 return NULL_TREE;
7513 /* If argument is already integer valued, and we don't need to worry
7514 about setting errno, there's no need to perform rounding. */
7515 if (! flag_errno_math && integer_valued_real_p (arg))
7516 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7518 if (optimize)
7520 tree ftype = TREE_TYPE (arg);
7521 tree arg0 = strip_float_extensions (arg);
7522 tree newtype = TREE_TYPE (arg0);
7523 tree decl;
7525 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7526 && (decl = mathfn_built_in (newtype, fcode)))
7527 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7530 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7531 sizeof (long long) == sizeof (long). */
7532 if (TYPE_PRECISION (long_long_integer_type_node)
7533 == TYPE_PRECISION (long_integer_type_node))
7535 tree newfn = NULL_TREE;
7536 switch (fcode)
7538 CASE_FLT_FN (BUILT_IN_LLCEIL):
7539 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7540 break;
7542 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7543 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7544 break;
7546 CASE_FLT_FN (BUILT_IN_LLROUND):
7547 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7548 break;
7550 CASE_FLT_FN (BUILT_IN_LLRINT):
7551 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7552 break;
7554 default:
7555 break;
7558 if (newfn)
7560 	  tree newcall = build_call_expr (newfn, 1, arg);
7561 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7565 return NULL_TREE;
7568 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7569 return type. Return NULL_TREE if no simplification can be made. */
7571 static tree
7572 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7574 tree res;
7576 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7577 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7578 return NULL_TREE;
7580 /* Calculate the result when the argument is a constant. */
7581 if (TREE_CODE (arg) == COMPLEX_CST
7582 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7583 type, mpfr_hypot)))
7584 return res;
7586 if (TREE_CODE (arg) == COMPLEX_EXPR)
7588 tree real = TREE_OPERAND (arg, 0);
7589 tree imag = TREE_OPERAND (arg, 1);
7591 /* If either part is zero, cabs is fabs of the other. */
7592 if (real_zerop (real))
7593 return fold_build1 (ABS_EXPR, type, imag);
7594 if (real_zerop (imag))
7595 return fold_build1 (ABS_EXPR, type, real);
7597 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7598 if (flag_unsafe_math_optimizations
7599 && operand_equal_p (real, imag, OEP_PURE_SAME))
7601 const REAL_VALUE_TYPE sqrt2_trunc
7602 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7603 STRIP_NOPS (real);
7604 return fold_build2 (MULT_EXPR, type,
7605 fold_build1 (ABS_EXPR, type, real),
7606 build_real (type, sqrt2_trunc));
7610 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7611 if (TREE_CODE (arg) == NEGATE_EXPR
7612 || TREE_CODE (arg) == CONJ_EXPR)
7613 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7615 /* Don't do this when optimizing for size. */
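   /* "This" is the generic expansion
         cabs (z) -> sqrt (__real z * __real z + __imag z * __imag z),
      which trades the cabs libcall for a sqrt call, two multiplies and
      an addition.  */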
7616 if (flag_unsafe_math_optimizations
7617 && optimize && optimize_function_for_speed_p (cfun))
7619 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7621 if (sqrtfn != NULL_TREE)
7623 tree rpart, ipart, result;
7625 arg = builtin_save_expr (arg);
7627 rpart = fold_build1 (REALPART_EXPR, type, arg);
7628 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7630 rpart = builtin_save_expr (rpart);
7631 ipart = builtin_save_expr (ipart);
7633 result = fold_build2 (PLUS_EXPR, type,
7634 fold_build2 (MULT_EXPR, type,
7635 rpart, rpart),
7636 fold_build2 (MULT_EXPR, type,
7637 ipart, ipart));
7639 return build_call_expr (sqrtfn, 1, result);
7643 return NULL_TREE;
7646 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7647 Return NULL_TREE if no simplification can be made. */
7649 static tree
7650 fold_builtin_sqrt (tree arg, tree type)
7653 enum built_in_function fcode;
7654 tree res;
7656 if (!validate_arg (arg, REAL_TYPE))
7657 return NULL_TREE;
7659 /* Calculate the result when the argument is a constant. */
7660 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7661 return res;
7663 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7664 fcode = builtin_mathfn_code (arg);
7665 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7667 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7668 arg = fold_build2 (MULT_EXPR, type,
7669 CALL_EXPR_ARG (arg, 0),
7670 build_real (type, dconsthalf));
7671 return build_call_expr (expfn, 1, arg);
7674 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7675 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7677 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7679 if (powfn)
7681 tree arg0 = CALL_EXPR_ARG (arg, 0);
7682 tree tree_root;
7683 /* The inner root was either sqrt or cbrt. */
7684 /* This was a conditional expression but it triggered a bug
7685 in the Solaris 8 compiler. */
7686 REAL_VALUE_TYPE dconstroot;
7687 if (BUILTIN_SQRT_P (fcode))
7688 dconstroot = dconsthalf;
7689 else
7690 dconstroot = dconst_third ();
7692 /* Adjust for the outer root. */
7693 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7694 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7695 tree_root = build_real (type, dconstroot);
7696 return build_call_expr (powfn, 2, arg0, tree_root);
7700 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7701 if (flag_unsafe_math_optimizations
7702 && (fcode == BUILT_IN_POW
7703 || fcode == BUILT_IN_POWF
7704 || fcode == BUILT_IN_POWL))
7706 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7707 tree arg0 = CALL_EXPR_ARG (arg, 0);
7708 tree arg1 = CALL_EXPR_ARG (arg, 1);
7709 tree narg1;
7710 if (!tree_expr_nonnegative_p (arg0))
7711 arg0 = build1 (ABS_EXPR, type, arg0);
7712 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7713 build_real (type, dconsthalf));
7714 return build_call_expr (powfn, 2, arg0, narg1);
7717 return NULL_TREE;
7720 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7721 Return NULL_TREE if no simplification can be made. */
7723 static tree
7724 fold_builtin_cbrt (tree arg, tree type)
7726 const enum built_in_function fcode = builtin_mathfn_code (arg);
7727 tree res;
7729 if (!validate_arg (arg, REAL_TYPE))
7730 return NULL_TREE;
7732 /* Calculate the result when the argument is a constant. */
7733 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7734 return res;
7736 if (flag_unsafe_math_optimizations)
7738 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7739 if (BUILTIN_EXPONENT_P (fcode))
7741 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7742 const REAL_VALUE_TYPE third_trunc =
7743 real_value_truncate (TYPE_MODE (type), dconst_third ());
7744 arg = fold_build2 (MULT_EXPR, type,
7745 CALL_EXPR_ARG (arg, 0),
7746 build_real (type, third_trunc));
7747 return build_call_expr (expfn, 1, arg);
7750 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7751 if (BUILTIN_SQRT_P (fcode))
7753 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7755 if (powfn)
7757 tree arg0 = CALL_EXPR_ARG (arg, 0);
7758 tree tree_root;
7759 REAL_VALUE_TYPE dconstroot = dconst_third ();
7761 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7762 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7763 tree_root = build_real (type, dconstroot);
7764 return build_call_expr (powfn, 2, arg0, tree_root);
7768 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7769 if (BUILTIN_CBRT_P (fcode))
7771 tree arg0 = CALL_EXPR_ARG (arg, 0);
7772 if (tree_expr_nonnegative_p (arg0))
7774 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7776 if (powfn)
7778 tree tree_root;
7779 REAL_VALUE_TYPE dconstroot;
7781 real_arithmetic (&dconstroot, MULT_EXPR,
7782 dconst_third_ptr (), dconst_third_ptr ());
7783 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7784 tree_root = build_real (type, dconstroot);
7785 return build_call_expr (powfn, 2, arg0, tree_root);
7790 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7791 if (fcode == BUILT_IN_POW
7792 || fcode == BUILT_IN_POWF
7793 || fcode == BUILT_IN_POWL)
7795 tree arg00 = CALL_EXPR_ARG (arg, 0);
7796 tree arg01 = CALL_EXPR_ARG (arg, 1);
7797 if (tree_expr_nonnegative_p (arg00))
7799 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7800 const REAL_VALUE_TYPE dconstroot
7801 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7802 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7803 build_real (type, dconstroot));
7804 return build_call_expr (powfn, 2, arg00, narg01);
7808 return NULL_TREE;
7811 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7812 TYPE is the type of the return value. Return NULL_TREE if no
7813 simplification can be made. */
7815 static tree
7816 fold_builtin_cos (tree arg, tree type, tree fndecl)
7818 tree res, narg;
7820 if (!validate_arg (arg, REAL_TYPE))
7821 return NULL_TREE;
7823 /* Calculate the result when the argument is a constant. */
7824 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7825 return res;
7827 /* Optimize cos(-x) into cos (x). */
7828 if ((narg = fold_strip_sign_ops (arg)))
7829 return build_call_expr (fndecl, 1, narg);
7831 return NULL_TREE;
7834 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7835 Return NULL_TREE if no simplification can be made. */
7837 static tree
7838 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7840 if (validate_arg (arg, REAL_TYPE))
7842 tree res, narg;
7844 /* Calculate the result when the argument is a constant. */
7845 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7846 return res;
7848 /* Optimize cosh(-x) into cosh (x). */
7849 if ((narg = fold_strip_sign_ops (arg)))
7850 return build_call_expr (fndecl, 1, narg);
7853 return NULL_TREE;
7856 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7857 Return NULL_TREE if no simplification can be made. */
7859 static tree
7860 fold_builtin_tan (tree arg, tree type)
7862 enum built_in_function fcode;
7863 tree res;
7865 if (!validate_arg (arg, REAL_TYPE))
7866 return NULL_TREE;
7868 /* Calculate the result when the argument is a constant. */
7869 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7870 return res;
7872 /* Optimize tan(atan(x)) = x. */
7873 fcode = builtin_mathfn_code (arg);
7874 if (flag_unsafe_math_optimizations
7875 && (fcode == BUILT_IN_ATAN
7876 || fcode == BUILT_IN_ATANF
7877 || fcode == BUILT_IN_ATANL))
7878 return CALL_EXPR_ARG (arg, 0);
7880 return NULL_TREE;
7883 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7884 NULL_TREE if no simplification can be made. */
7886 static tree
7887 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7889 tree type;
7890 tree res, fn, call;
7892 if (!validate_arg (arg0, REAL_TYPE)
7893 || !validate_arg (arg1, POINTER_TYPE)
7894 || !validate_arg (arg2, POINTER_TYPE))
7895 return NULL_TREE;
7897 type = TREE_TYPE (arg0);
7899 /* Calculate the result when the argument is a constant. */
7900 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7901 return res;
7903 /* Canonicalize sincos to cexpi. */
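   /* Conceptually, sincos (x, &s, &c) becomes
         tmp = cexpi (x); s = __imag tmp; c = __real tmp;
      (illustrative; the actual trees are built below).  */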
7904 if (!TARGET_C99_FUNCTIONS)
7905 return NULL_TREE;
7906 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7907 if (!fn)
7908 return NULL_TREE;
7910 call = build_call_expr (fn, 1, arg0);
7911 call = builtin_save_expr (call);
7913 return build2 (COMPOUND_EXPR, type,
7914 build2 (MODIFY_EXPR, void_type_node,
7915 build_fold_indirect_ref (arg1),
7916 build1 (IMAGPART_EXPR, type, call)),
7917 build2 (MODIFY_EXPR, void_type_node,
7918 build_fold_indirect_ref (arg2),
7919 build1 (REALPART_EXPR, type, call)));
7922 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7923 NULL_TREE if no simplification can be made. */
7925 static tree
7926 fold_builtin_cexp (tree arg0, tree type)
7928 tree rtype;
7929 tree realp, imagp, ifn;
7931 if (!validate_arg (arg0, COMPLEX_TYPE))
7932 return NULL_TREE;
7934 rtype = TREE_TYPE (TREE_TYPE (arg0));
7936   /* In case we can figure out the real part of arg0 and it is constant zero,
7937 fold to cexpi. */
7938 if (!TARGET_C99_FUNCTIONS)
7939 return NULL_TREE;
7940 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7941 if (!ifn)
7942 return NULL_TREE;
7944 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7945 && real_zerop (realp))
7947 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7948 return build_call_expr (ifn, 1, narg);
7951   /* In case we can easily decompose the real and imaginary parts, split cexp
7952      into exp (r) * cexpi (i).  */
7953 if (flag_unsafe_math_optimizations
7954 && realp)
7956 tree rfn, rcall, icall;
7958 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7959 if (!rfn)
7960 return NULL_TREE;
7962 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7963 if (!imagp)
7964 return NULL_TREE;
7966 icall = build_call_expr (ifn, 1, imagp);
7967 icall = builtin_save_expr (icall);
7968 rcall = build_call_expr (rfn, 1, realp);
7969 rcall = builtin_save_expr (rcall);
7970 return fold_build2 (COMPLEX_EXPR, type,
7971 fold_build2 (MULT_EXPR, rtype,
7972 rcall,
7973 fold_build1 (REALPART_EXPR, rtype, icall)),
7974 fold_build2 (MULT_EXPR, rtype,
7975 rcall,
7976 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7979 return NULL_TREE;
7982 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7983 Return NULL_TREE if no simplification can be made. */
7985 static tree
7986 fold_builtin_trunc (tree fndecl, tree arg)
7988 if (!validate_arg (arg, REAL_TYPE))
7989 return NULL_TREE;
7991 /* Optimize trunc of constant value. */
7992 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7994 REAL_VALUE_TYPE r, x;
7995 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7997 x = TREE_REAL_CST (arg);
7998 real_trunc (&r, TYPE_MODE (type), &x);
7999 return build_real (type, r);
8002 return fold_trunc_transparent_mathfn (fndecl, arg);
8005 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8006 Return NULL_TREE if no simplification can be made. */
8008 static tree
8009 fold_builtin_floor (tree fndecl, tree arg)
8011 if (!validate_arg (arg, REAL_TYPE))
8012 return NULL_TREE;
8014 /* Optimize floor of constant value. */
8015 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8017 REAL_VALUE_TYPE x;
8019 x = TREE_REAL_CST (arg);
8020 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8022 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8023 REAL_VALUE_TYPE r;
8025 real_floor (&r, TYPE_MODE (type), &x);
8026 return build_real (type, r);
8030 /* Fold floor (x) where x is nonnegative to trunc (x). */
8031 if (tree_expr_nonnegative_p (arg))
8033 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8034 if (truncfn)
8035 return build_call_expr (truncfn, 1, arg);
8038 return fold_trunc_transparent_mathfn (fndecl, arg);
8041 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8042 Return NULL_TREE if no simplification can be made. */
8044 static tree
8045 fold_builtin_ceil (tree fndecl, tree arg)
8047 if (!validate_arg (arg, REAL_TYPE))
8048 return NULL_TREE;
8050 /* Optimize ceil of constant value. */
8051 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8053 REAL_VALUE_TYPE x;
8055 x = TREE_REAL_CST (arg);
8056 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8058 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8059 REAL_VALUE_TYPE r;
8061 real_ceil (&r, TYPE_MODE (type), &x);
8062 return build_real (type, r);
8066 return fold_trunc_transparent_mathfn (fndecl, arg);
8069 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8070 Return NULL_TREE if no simplification can be made. */
8072 static tree
8073 fold_builtin_round (tree fndecl, tree arg)
8075 if (!validate_arg (arg, REAL_TYPE))
8076 return NULL_TREE;
8078 /* Optimize round of constant value. */
8079 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8081 REAL_VALUE_TYPE x;
8083 x = TREE_REAL_CST (arg);
8084 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8086 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8087 REAL_VALUE_TYPE r;
8089 real_round (&r, TYPE_MODE (type), &x);
8090 return build_real (type, r);
8094 return fold_trunc_transparent_mathfn (fndecl, arg);
8097 /* Fold function call to builtin lround, lroundf or lroundl (or the
8098 corresponding long long versions) and other rounding functions. ARG
8099 is the argument to the call. Return NULL_TREE if no simplification
8100 can be made. */
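/* For example, lround (2.5) folds to the integer constant 3 at compile
   time, and lfloor (x) for provably nonnegative X is folded below to a
   plain FIX_TRUNC_EXPR.  */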
8102 static tree
8103 fold_builtin_int_roundingfn (tree fndecl, tree arg)
8105 if (!validate_arg (arg, REAL_TYPE))
8106 return NULL_TREE;
8108 /* Optimize lround of constant value. */
8109 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8111 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8113 if (real_isfinite (&x))
8115 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8116 tree ftype = TREE_TYPE (arg);
8117 unsigned HOST_WIDE_INT lo2;
8118 HOST_WIDE_INT hi, lo;
8119 REAL_VALUE_TYPE r;
8121 switch (DECL_FUNCTION_CODE (fndecl))
8123 CASE_FLT_FN (BUILT_IN_LFLOOR):
8124 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8125 real_floor (&r, TYPE_MODE (ftype), &x);
8126 break;
8128 CASE_FLT_FN (BUILT_IN_LCEIL):
8129 CASE_FLT_FN (BUILT_IN_LLCEIL):
8130 real_ceil (&r, TYPE_MODE (ftype), &x);
8131 break;
8133 CASE_FLT_FN (BUILT_IN_LROUND):
8134 CASE_FLT_FN (BUILT_IN_LLROUND):
8135 real_round (&r, TYPE_MODE (ftype), &x);
8136 break;
8138 default:
8139 gcc_unreachable ();
8142 REAL_VALUE_TO_INT (&lo, &hi, r);
8143 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8144 return build_int_cst_wide (itype, lo2, hi);
8148 switch (DECL_FUNCTION_CODE (fndecl))
8150 CASE_FLT_FN (BUILT_IN_LFLOOR):
8151 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8152 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8153 if (tree_expr_nonnegative_p (arg))
8154 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8155 arg);
8156 break;
8157 default:;
8160 return fold_fixed_mathfn (fndecl, arg);
8163 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8164    and their long and long long variants (e.g. ffsl and ffsll).  ARG is
8165 the argument to the call. Return NULL_TREE if no simplification can
8166 be made. */
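/* For example, __builtin_ffs (8) folds to 4 and __builtin_popcount (0xf0)
   folds to 4 when the argument is a compile-time constant.  */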
8168 static tree
8169 fold_builtin_bitop (tree fndecl, tree arg)
8171 if (!validate_arg (arg, INTEGER_TYPE))
8172 return NULL_TREE;
8174 /* Optimize for constant argument. */
8175 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8177 HOST_WIDE_INT hi, width, result;
8178 unsigned HOST_WIDE_INT lo;
8179 tree type;
8181 type = TREE_TYPE (arg);
8182 width = TYPE_PRECISION (type);
8183 lo = TREE_INT_CST_LOW (arg);
8185 /* Clear all the bits that are beyond the type's precision. */
8186 if (width > HOST_BITS_PER_WIDE_INT)
8188 hi = TREE_INT_CST_HIGH (arg);
8189 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8190 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8192 else
8194 hi = 0;
8195 if (width < HOST_BITS_PER_WIDE_INT)
8196 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8199 switch (DECL_FUNCTION_CODE (fndecl))
8201 CASE_INT_FN (BUILT_IN_FFS):
8202 if (lo != 0)
8203 result = exact_log2 (lo & -lo) + 1;
8204 else if (hi != 0)
8205 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8206 else
8207 result = 0;
8208 break;
8210 CASE_INT_FN (BUILT_IN_CLZ):
8211 if (hi != 0)
8212 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8213 else if (lo != 0)
8214 result = width - floor_log2 (lo) - 1;
8215 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8216 result = width;
8217 break;
8219 CASE_INT_FN (BUILT_IN_CTZ):
8220 if (lo != 0)
8221 result = exact_log2 (lo & -lo);
8222 else if (hi != 0)
8223 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8224 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8225 result = width;
8226 break;
8228 CASE_INT_FN (BUILT_IN_POPCOUNT):
8229 result = 0;
8230 while (lo)
8231 result++, lo &= lo - 1;
8232 while (hi)
8233 result++, hi &= hi - 1;
8234 break;
8236 CASE_INT_FN (BUILT_IN_PARITY):
8237 result = 0;
8238 while (lo)
8239 result++, lo &= lo - 1;
8240 while (hi)
8241 result++, hi &= hi - 1;
8242 result &= 1;
8243 break;
8245 default:
8246 gcc_unreachable ();
8249 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8252 return NULL_TREE;
8255 /* Fold function call to builtin_bswap and the long and long long
8256 variants. Return NULL_TREE if no simplification can be made. */
8257 static tree
8258 fold_builtin_bswap (tree fndecl, tree arg)
8260 if (! validate_arg (arg, INTEGER_TYPE))
8261 return NULL_TREE;
8263 /* Optimize constant value. */
8264 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8266 HOST_WIDE_INT hi, width, r_hi = 0;
8267 unsigned HOST_WIDE_INT lo, r_lo = 0;
8268 tree type;
8270 type = TREE_TYPE (arg);
8271 width = TYPE_PRECISION (type);
8272 lo = TREE_INT_CST_LOW (arg);
8273 hi = TREE_INT_CST_HIGH (arg);
8275 switch (DECL_FUNCTION_CODE (fndecl))
8277 case BUILT_IN_BSWAP32:
8278 case BUILT_IN_BSWAP64:
8280 int s;
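	  /* Move the byte at bit offset S (from the least significant end)
	     to the mirrored offset D = WIDTH - S - 8, assembling the
	     swapped value in R_LO/R_HI.  */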
8282 for (s = 0; s < width; s += 8)
8284 int d = width - s - 8;
8285 unsigned HOST_WIDE_INT byte;
8287 if (s < HOST_BITS_PER_WIDE_INT)
8288 byte = (lo >> s) & 0xff;
8289 else
8290 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8292 if (d < HOST_BITS_PER_WIDE_INT)
8293 r_lo |= byte << d;
8294 else
8295 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8299 break;
8301 default:
8302 gcc_unreachable ();
8305 if (width < HOST_BITS_PER_WIDE_INT)
8306 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8307 else
8308 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8311 return NULL_TREE;
8314 /* Return true if EXPR is the real constant contained in VALUE. */
8316 static bool
8317 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8319 STRIP_NOPS (expr);
8321 return ((TREE_CODE (expr) == REAL_CST
8322 && !TREE_OVERFLOW (expr)
8323 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8324 || (TREE_CODE (expr) == COMPLEX_CST
8325 && real_dconstp (TREE_REALPART (expr), value)
8326 && real_zerop (TREE_IMAGPART (expr))));
8329 /* A subroutine of fold_builtin to fold the various logarithmic
8330    functions.  Return NULL_TREE if no simplification can be made.
8331 FUNC is the corresponding MPFR logarithm function. */
8333 static tree
8334 fold_builtin_logarithm (tree fndecl, tree arg,
8335 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8337 if (validate_arg (arg, REAL_TYPE))
8339 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8340 tree res;
8341 const enum built_in_function fcode = builtin_mathfn_code (arg);
8343       /* Optimize log(e) = 1.0.  We're never passed an exact 'e';
8344 instead we'll look for 'e' truncated to MODE. So only do
8345 this if flag_unsafe_math_optimizations is set. */
8346 if (flag_unsafe_math_optimizations && func == mpfr_log)
8348 const REAL_VALUE_TYPE e_truncated =
8349 real_value_truncate (TYPE_MODE (type), dconst_e ());
8350 if (real_dconstp (arg, &e_truncated))
8351 return build_real (type, dconst1);
8354 /* Calculate the result when the argument is a constant. */
8355 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8356 return res;
8358 /* Special case, optimize logN(expN(x)) = x. */
8359 if (flag_unsafe_math_optimizations
8360 && ((func == mpfr_log
8361 && (fcode == BUILT_IN_EXP
8362 || fcode == BUILT_IN_EXPF
8363 || fcode == BUILT_IN_EXPL))
8364 || (func == mpfr_log2
8365 && (fcode == BUILT_IN_EXP2
8366 || fcode == BUILT_IN_EXP2F
8367 || fcode == BUILT_IN_EXP2L))
8368 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8369 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8371 /* Optimize logN(func()) for various exponential functions. We
8372 want to determine the value "x" and the power "exponent" in
8373 order to transform logN(x**exponent) into exponent*logN(x). */
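      /* For example, under -funsafe-math-optimizations,
            log (pow (x, y)) -> y * log (x)
            log (sqrt (x))   -> 0.5 * log (x)
         with the logN(expN(x)) case already handled above.  */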
8374 if (flag_unsafe_math_optimizations)
8376 tree exponent = 0, x = 0;
8378 switch (fcode)
8380 CASE_FLT_FN (BUILT_IN_EXP):
8381 	    /* Prepare to do logN(exp(exponent)) -> exponent*logN(e).  */
8382 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8383 dconst_e ()));
8384 exponent = CALL_EXPR_ARG (arg, 0);
8385 break;
8386 CASE_FLT_FN (BUILT_IN_EXP2):
8387 	    /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2).  */
8388 x = build_real (type, dconst2);
8389 exponent = CALL_EXPR_ARG (arg, 0);
8390 break;
8391 CASE_FLT_FN (BUILT_IN_EXP10):
8392 CASE_FLT_FN (BUILT_IN_POW10):
8393 	    /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10).  */
8395 REAL_VALUE_TYPE dconst10;
8396 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8397 x = build_real (type, dconst10);
8399 exponent = CALL_EXPR_ARG (arg, 0);
8400 break;
8401 CASE_FLT_FN (BUILT_IN_SQRT):
8402 	    /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x).  */
8403 x = CALL_EXPR_ARG (arg, 0);
8404 exponent = build_real (type, dconsthalf);
8405 break;
8406 CASE_FLT_FN (BUILT_IN_CBRT):
8407 	    /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x).  */
8408 x = CALL_EXPR_ARG (arg, 0);
8409 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8410 dconst_third ()));
8411 break;
8412 CASE_FLT_FN (BUILT_IN_POW):
8413 	    /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x).  */
8414 x = CALL_EXPR_ARG (arg, 0);
8415 exponent = CALL_EXPR_ARG (arg, 1);
8416 break;
8417 default:
8418 break;
8421 /* Now perform the optimization. */
8422 if (x && exponent)
8424 tree logfn = build_call_expr (fndecl, 1, x);
8425 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8430 return NULL_TREE;
8433 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8434 NULL_TREE if no simplification can be made. */
8436 static tree
8437 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8439 tree res, narg0, narg1;
8441 if (!validate_arg (arg0, REAL_TYPE)
8442 || !validate_arg (arg1, REAL_TYPE))
8443 return NULL_TREE;
8445 /* Calculate the result when the argument is a constant. */
8446 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8447 return res;
8449 /* If either argument to hypot has a negate or abs, strip that off.
8450 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8451 narg0 = fold_strip_sign_ops (arg0);
8452 narg1 = fold_strip_sign_ops (arg1);
8453 if (narg0 || narg1)
8455 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8456 narg1 ? narg1 : arg1);
8459 /* If either argument is zero, hypot is fabs of the other. */
8460 if (real_zerop (arg0))
8461 return fold_build1 (ABS_EXPR, type, arg1);
8462 else if (real_zerop (arg1))
8463 return fold_build1 (ABS_EXPR, type, arg0);
8465 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8466 if (flag_unsafe_math_optimizations
8467 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8469 const REAL_VALUE_TYPE sqrt2_trunc
8470 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8471 return fold_build2 (MULT_EXPR, type,
8472 fold_build1 (ABS_EXPR, type, arg0),
8473 build_real (type, sqrt2_trunc));
8476 return NULL_TREE;
8480 /* Fold a builtin function call to pow, powf, or powl. Return
8481 NULL_TREE if no simplification can be made. */
8482 static tree
8483 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8485 tree res;
8487 if (!validate_arg (arg0, REAL_TYPE)
8488 || !validate_arg (arg1, REAL_TYPE))
8489 return NULL_TREE;
8491 /* Calculate the result when the argument is a constant. */
8492 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8493 return res;
8495 /* Optimize pow(1.0,y) = 1.0. */
8496 if (real_onep (arg0))
8497 return omit_one_operand (type, build_real (type, dconst1), arg1);
8499 if (TREE_CODE (arg1) == REAL_CST
8500 && !TREE_OVERFLOW (arg1))
8502 REAL_VALUE_TYPE cint;
8503 REAL_VALUE_TYPE c;
8504 HOST_WIDE_INT n;
8506 c = TREE_REAL_CST (arg1);
8508 /* Optimize pow(x,0.0) = 1.0. */
8509 if (REAL_VALUES_EQUAL (c, dconst0))
8510 return omit_one_operand (type, build_real (type, dconst1),
8511 arg0);
8513 /* Optimize pow(x,1.0) = x. */
8514 if (REAL_VALUES_EQUAL (c, dconst1))
8515 return arg0;
8517 /* Optimize pow(x,-1.0) = 1.0/x. */
8518 if (REAL_VALUES_EQUAL (c, dconstm1))
8519 return fold_build2 (RDIV_EXPR, type,
8520 build_real (type, dconst1), arg0);
8522 /* Optimize pow(x,0.5) = sqrt(x). */
8523 if (flag_unsafe_math_optimizations
8524 && REAL_VALUES_EQUAL (c, dconsthalf))
8526 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8528 if (sqrtfn != NULL_TREE)
8529 return build_call_expr (sqrtfn, 1, arg0);
8532 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8533 if (flag_unsafe_math_optimizations)
8535 const REAL_VALUE_TYPE dconstroot
8536 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8538 if (REAL_VALUES_EQUAL (c, dconstroot))
8540 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8541 if (cbrtfn != NULL_TREE)
8542 return build_call_expr (cbrtfn, 1, arg0);
8546 /* Check for an integer exponent. */
8547 n = real_to_integer (&c);
8548 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8549 if (real_identical (&c, &cint))
8551 /* Attempt to evaluate pow at compile-time, unless this should
8552 raise an exception. */
8553 if (TREE_CODE (arg0) == REAL_CST
8554 && !TREE_OVERFLOW (arg0)
8555 && (n > 0
8556 || (!flag_trapping_math && !flag_errno_math)
8557 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8559 REAL_VALUE_TYPE x;
8560 bool inexact;
8562 x = TREE_REAL_CST (arg0);
8563 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8564 if (flag_unsafe_math_optimizations || !inexact)
8565 return build_real (type, x);
8568 /* Strip sign ops from even integer powers. */
8569 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8571 tree narg0 = fold_strip_sign_ops (arg0);
8572 if (narg0)
8573 return build_call_expr (fndecl, 2, narg0, arg1);
8578 if (flag_unsafe_math_optimizations)
8580 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8582 /* Optimize pow(expN(x),y) = expN(x*y). */
8583 if (BUILTIN_EXPONENT_P (fcode))
8585 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8586 tree arg = CALL_EXPR_ARG (arg0, 0);
8587 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8588 return build_call_expr (expfn, 1, arg);
8591 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8592 if (BUILTIN_SQRT_P (fcode))
8594 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8595 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8596 build_real (type, dconsthalf));
8597 return build_call_expr (fndecl, 2, narg0, narg1);
8600 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8601 if (BUILTIN_CBRT_P (fcode))
8603 tree arg = CALL_EXPR_ARG (arg0, 0);
8604 if (tree_expr_nonnegative_p (arg))
8606 const REAL_VALUE_TYPE dconstroot
8607 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8608 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8609 build_real (type, dconstroot));
8610 return build_call_expr (fndecl, 2, arg, narg1);
8614 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8615 if (fcode == BUILT_IN_POW
8616 || fcode == BUILT_IN_POWF
8617 || fcode == BUILT_IN_POWL)
8619 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8620 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8621 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8622 return build_call_expr (fndecl, 2, arg00, narg1);
8626 return NULL_TREE;
8629 /* Fold a builtin function call to powi, powif, or powil with arguments ARG0
8630    and ARG1.  Return NULL_TREE if no simplification can be made.  */
8631 static tree
8632 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8633 tree arg0, tree arg1, tree type)
8635 if (!validate_arg (arg0, REAL_TYPE)
8636 || !validate_arg (arg1, INTEGER_TYPE))
8637 return NULL_TREE;
8639 /* Optimize pow(1.0,y) = 1.0. */
8640 if (real_onep (arg0))
8641 return omit_one_operand (type, build_real (type, dconst1), arg1);
8643 if (host_integerp (arg1, 0))
8645 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8647 /* Evaluate powi at compile-time. */
8648 if (TREE_CODE (arg0) == REAL_CST
8649 && !TREE_OVERFLOW (arg0))
8651 REAL_VALUE_TYPE x;
8652 x = TREE_REAL_CST (arg0);
8653 real_powi (&x, TYPE_MODE (type), &x, c);
8654 return build_real (type, x);
8657 /* Optimize pow(x,0) = 1.0. */
8658 if (c == 0)
8659 return omit_one_operand (type, build_real (type, dconst1),
8660 arg0);
8662 /* Optimize pow(x,1) = x. */
8663 if (c == 1)
8664 return arg0;
8666 /* Optimize pow(x,-1) = 1.0/x. */
8667 if (c == -1)
8668 return fold_build2 (RDIV_EXPR, type,
8669 build_real (type, dconst1), arg0);
8672 return NULL_TREE;
8675 /* A subroutine of fold_builtin to fold the various exponent
8676 functions. Return NULL_TREE if no simplification can be made.
8677 FUNC is the corresponding MPFR exponent function. */
8679 static tree
8680 fold_builtin_exponent (tree fndecl, tree arg,
8681 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8683 if (validate_arg (arg, REAL_TYPE))
8685 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8686 tree res;
8688 /* Calculate the result when the argument is a constant. */
8689 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8690 return res;
8692 /* Optimize expN(logN(x)) = x. */
8693 if (flag_unsafe_math_optimizations)
8695 const enum built_in_function fcode = builtin_mathfn_code (arg);
8697 if ((func == mpfr_exp
8698 && (fcode == BUILT_IN_LOG
8699 || fcode == BUILT_IN_LOGF
8700 || fcode == BUILT_IN_LOGL))
8701 || (func == mpfr_exp2
8702 && (fcode == BUILT_IN_LOG2
8703 || fcode == BUILT_IN_LOG2F
8704 || fcode == BUILT_IN_LOG2L))
8705 || (func == mpfr_exp10
8706 && (fcode == BUILT_IN_LOG10
8707 || fcode == BUILT_IN_LOG10F
8708 || fcode == BUILT_IN_LOG10L)))
8709 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8713 return NULL_TREE;
8716 /* Return true if VAR is a VAR_DECL or a component thereof. */
8718 static bool
8719 var_decl_component_p (tree var)
8721 tree inner = var;
8722 while (handled_component_p (inner))
8723 inner = TREE_OPERAND (inner, 0);
8724 return SSA_VAR_P (inner);
8727 /* Fold function call to builtin memset. Return
8728 NULL_TREE if no simplification can be made. */
8730 static tree
8731 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8733 tree var, ret;
8734 unsigned HOST_WIDE_INT length, cval;
8736 if (! validate_arg (dest, POINTER_TYPE)
8737 || ! validate_arg (c, INTEGER_TYPE)
8738 || ! validate_arg (len, INTEGER_TYPE))
8739 return NULL_TREE;
8741 if (! host_integerp (len, 1))
8742 return NULL_TREE;
8744 /* If the LEN parameter is zero, return DEST. */
8745 if (integer_zerop (len))
8746 return omit_one_operand (type, dest, c);
8748 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8749 return NULL_TREE;
8751 var = dest;
8752 STRIP_NOPS (var);
8753 if (TREE_CODE (var) != ADDR_EXPR)
8754 return NULL_TREE;
8756 var = TREE_OPERAND (var, 0);
8757 if (TREE_THIS_VOLATILE (var))
8758 return NULL_TREE;
8760 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8761 && !POINTER_TYPE_P (TREE_TYPE (var)))
8762 return NULL_TREE;
8764 if (! var_decl_component_p (var))
8765 return NULL_TREE;
8767 length = tree_low_cst (len, 1);
8768 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8769 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8770 < (int) length)
8771 return NULL_TREE;
8773 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8774 return NULL_TREE;
8776 if (integer_zerop (c))
8777 cval = 0;
8778 else
8780 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8781 return NULL_TREE;
8783 cval = tree_low_cst (c, 1);
8784 cval &= 0xff;
8785 cval |= cval << 8;
8786 cval |= cval << 16;
8787 cval |= (cval << 31) << 1;
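      /* The byte value of C is now replicated into every byte of CVAL.
         The last step is presumably written as (cval << 31) << 1 rather
         than cval << 32 so the shift count stays below the width of
         HOST_WIDE_INT on hosts where it is only 32 bits wide.  */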
8790 ret = build_int_cst_type (TREE_TYPE (var), cval);
8791 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8792 if (ignore)
8793 return ret;
8795 return omit_one_operand (type, dest, ret);
8798 /* Fold function call to builtin bzero.  Return
8799    NULL_TREE if no simplification can be made.  */
8801 static tree
8802 fold_builtin_bzero (tree dest, tree size, bool ignore)
8804 if (! validate_arg (dest, POINTER_TYPE)
8805 || ! validate_arg (size, INTEGER_TYPE))
8806 return NULL_TREE;
8808 if (!ignore)
8809 return NULL_TREE;
8811 /* New argument list transforming bzero(ptr x, int y) to
8812 memset(ptr x, int 0, size_t y). This is done this way
8813      so that if it isn't expanded inline, we fall back to
8814 calling bzero instead of memset. */
8816 return fold_builtin_memset (dest, integer_zero_node,
8817 fold_convert (sizetype, size),
8818 void_type_node, ignore);
8821 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8822 NULL_TREE if no simplification can be made.
8823 If ENDP is 0, return DEST (like memcpy).
8824 If ENDP is 1, return DEST+LEN (like mempcpy).
8825 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8826 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8827 (memmove). */
8829 static tree
8830 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8832 tree destvar, srcvar, expr;
8834 if (! validate_arg (dest, POINTER_TYPE)
8835 || ! validate_arg (src, POINTER_TYPE)
8836 || ! validate_arg (len, INTEGER_TYPE))
8837 return NULL_TREE;
8839 /* If the LEN parameter is zero, return DEST. */
8840 if (integer_zerop (len))
8841 return omit_one_operand (type, dest, src);
8843 /* If SRC and DEST are the same (and not volatile), return
8844 DEST{,+LEN,+LEN-1}. */
8845 if (operand_equal_p (src, dest, 0))
8846 expr = len;
8847 else
8849 tree srctype, desttype;
8850 int src_align, dest_align;
8852 if (endp == 3)
8854 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8855 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8857 /* Both DEST and SRC must be pointer types.
8858 ??? This is what old code did. Is the testing for pointer types
8859 really mandatory?
8861 If either SRC is readonly or length is 1, we can use memcpy. */
8862 if (dest_align && src_align
8863 && (readonly_data_expr (src)
8864 || (host_integerp (len, 1)
8865 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8866 tree_low_cst (len, 1)))))
8868 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8869 if (!fn)
8870 return NULL_TREE;
8871 return build_call_expr (fn, 3, dest, src, len);
8873 return NULL_TREE;
8876 if (!host_integerp (len, 0))
8877 return NULL_TREE;
8878 /* FIXME:
8879 	 This logic loses for arguments like (type *)malloc (sizeof (type)),
8880 	 since we strip the casts from malloc's VOID * return value.
8881 	 Perhaps we ought to inherit the type from the non-VOID argument here?  */
8882 STRIP_NOPS (src);
8883 STRIP_NOPS (dest);
8884 srctype = TREE_TYPE (TREE_TYPE (src));
8885 desttype = TREE_TYPE (TREE_TYPE (dest));
8886 if (!srctype || !desttype
8887 || !TYPE_SIZE_UNIT (srctype)
8888 || !TYPE_SIZE_UNIT (desttype)
8889 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8890 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST)
8891 return NULL_TREE;
8893 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8894 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8895 if (dest_align < (int) TYPE_ALIGN (desttype)
8896 || src_align < (int) TYPE_ALIGN (srctype))
8897 return NULL_TREE;
8899 if (!ignore)
8900 dest = builtin_save_expr (dest);
8902 srcvar = NULL_TREE;
8903 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8905 srcvar = build_fold_indirect_ref (src);
8906 if (TREE_THIS_VOLATILE (srcvar))
8907 srcvar = NULL_TREE;
8908 else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8909 srcvar = NULL_TREE;
8910 /* With memcpy, it is possible to bypass aliasing rules, so without
8911 	 this check, e.g., execute/20060930-2.c would be misoptimized,
8912 	 because it uses a conflicting alias set to hold the argument for the
8913 	 memcpy call.  This check is probably unnecessary with
8914 -fno-strict-aliasing. Similarly for destvar. See also
8915 PR29286. */
8916 else if (!var_decl_component_p (srcvar))
8917 srcvar = NULL_TREE;
8920 destvar = NULL_TREE;
8921 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8923 destvar = build_fold_indirect_ref (dest);
8924 if (TREE_THIS_VOLATILE (destvar))
8925 destvar = NULL_TREE;
8926 else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8927 destvar = NULL_TREE;
8928 else if (!var_decl_component_p (destvar))
8929 destvar = NULL_TREE;
8932 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8933 return NULL_TREE;
8935 if (srcvar == NULL_TREE)
8937 tree srcptype;
8938 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8939 return NULL_TREE;
8941 srctype = desttype;
8942 if (src_align < (int) TYPE_ALIGN (srctype))
8944 if (AGGREGATE_TYPE_P (srctype)
8945 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8946 return NULL_TREE;
8948 srctype = build_variant_type_copy (srctype);
8949 TYPE_ALIGN (srctype) = src_align;
8950 TYPE_USER_ALIGN (srctype) = 1;
8951 TYPE_PACKED (srctype) = 1;
8953 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8954 src = fold_convert (srcptype, src);
8955 srcvar = build_fold_indirect_ref (src);
8957 else if (destvar == NULL_TREE)
8959 tree destptype;
8960 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8961 return NULL_TREE;
8963 desttype = srctype;
8964 if (dest_align < (int) TYPE_ALIGN (desttype))
8966 if (AGGREGATE_TYPE_P (desttype)
8967 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8968 return NULL_TREE;
8970 desttype = build_variant_type_copy (desttype);
8971 TYPE_ALIGN (desttype) = dest_align;
8972 TYPE_USER_ALIGN (desttype) = 1;
8973 TYPE_PACKED (desttype) = 1;
8975 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8976 dest = fold_convert (destptype, dest);
8977 destvar = build_fold_indirect_ref (dest);
8980 if (srctype == desttype
8981 || (gimple_in_ssa_p (cfun)
8982 && useless_type_conversion_p (desttype, srctype)))
8983 expr = srcvar;
8984 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8985 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8986 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8987 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8988 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8989 else
8990 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8991 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8994 if (ignore)
8995 return expr;
8997 if (endp == 0 || endp == 3)
8998 return omit_one_operand (type, dest, expr);
9000 if (expr == len)
9001 expr = NULL_TREE;
9003 if (endp == 2)
9004 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
9005 ssize_int (1));
9007 len = fold_convert (sizetype, len);
9008 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
9009 dest = fold_convert (type, dest);
9010 if (expr)
9011 dest = omit_one_operand (type, dest, expr);
9012 return dest;
9015 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9016 If LEN is not NULL, it represents the length of the string to be
9017 copied. Return NULL_TREE if no simplification can be made. */
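/* For illustration: when SRC is a known string such as "hi",
   strcpy (dest, "hi") is rewritten below into memcpy (dest, "hi", 3),
   provided memcpy is available and we are not optimizing for size.  */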
9019 tree
9020 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
9022 tree fn;
9024 if (!validate_arg (dest, POINTER_TYPE)
9025 || !validate_arg (src, POINTER_TYPE))
9026 return NULL_TREE;
9028 /* If SRC and DEST are the same (and not volatile), return DEST. */
9029 if (operand_equal_p (src, dest, 0))
9030 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
9032 if (optimize_function_for_size_p (cfun))
9033 return NULL_TREE;
9035 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9036 if (!fn)
9037 return NULL_TREE;
9039 if (!len)
9041 len = c_strlen (src, 1);
9042 if (! len || TREE_SIDE_EFFECTS (len))
9043 return NULL_TREE;
9046 len = size_binop (PLUS_EXPR, len, ssize_int (1));
9047 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9048 build_call_expr (fn, 3, dest, src, len));
9051 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9052 If SLEN is not NULL, it represents the length of the source string.
9053 Return NULL_TREE if no simplification can be made. */
9055 tree
9056 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
9058 tree fn;
9060 if (!validate_arg (dest, POINTER_TYPE)
9061 || !validate_arg (src, POINTER_TYPE)
9062 || !validate_arg (len, INTEGER_TYPE))
9063 return NULL_TREE;
9065 /* If the LEN parameter is zero, return DEST. */
9066 if (integer_zerop (len))
9067 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9069 /* We can't compare slen with len as constants below if len is not a
9070 constant. */
9071 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9072 return NULL_TREE;
9074 if (!slen)
9075 slen = c_strlen (src, 1);
9077 /* Now, we must be passed a constant src ptr parameter. */
9078 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9079 return NULL_TREE;
9081 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
9083 /* We do not support simplification of this case, though we do
9084 support it when expanding trees into RTL. */
9085 /* FIXME: generate a call to __builtin_memset. */
9086 if (tree_int_cst_lt (slen, len))
9087 return NULL_TREE;
9089 /* OK transform into builtin memcpy. */
9090 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9091 if (!fn)
9092 return NULL_TREE;
9093 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9094 build_call_expr (fn, 3, dest, src, len));
9097 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9098 arguments to the call, and TYPE is its return type.
9099 Return NULL_TREE if no simplification can be made. */
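/* E.g. memchr ("hello", 'l', 5) folds to the constant address
   "hello" + 2, computed with the host memchr.  */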
9101 static tree
9102 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
9104 if (!validate_arg (arg1, POINTER_TYPE)
9105 || !validate_arg (arg2, INTEGER_TYPE)
9106 || !validate_arg (len, INTEGER_TYPE))
9107 return NULL_TREE;
9108 else
9110 const char *p1;
9112 if (TREE_CODE (arg2) != INTEGER_CST
9113 || !host_integerp (len, 1))
9114 return NULL_TREE;
9116 p1 = c_getstr (arg1);
9117 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9119 char c;
9120 const char *r;
9121 tree tem;
9123 if (target_char_cast (arg2, &c))
9124 return NULL_TREE;
9126 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9128 if (r == NULL)
9129 return build_int_cst (TREE_TYPE (arg1), 0);
9131 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9132 size_int (r - p1));
9133 return fold_convert (type, tem);
9135 return NULL_TREE;
9139 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9140 Return NULL_TREE if no simplification can be made. */
9142 static tree
9143 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
9145 const char *p1, *p2;
9147 if (!validate_arg (arg1, POINTER_TYPE)
9148 || !validate_arg (arg2, POINTER_TYPE)
9149 || !validate_arg (len, INTEGER_TYPE))
9150 return NULL_TREE;
9152 /* If the LEN parameter is zero, return zero. */
9153 if (integer_zerop (len))
9154 return omit_two_operands (integer_type_node, integer_zero_node,
9155 arg1, arg2);
9157 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9158 if (operand_equal_p (arg1, arg2, 0))
9159 return omit_one_operand (integer_type_node, integer_zero_node, len);
9161 p1 = c_getstr (arg1);
9162 p2 = c_getstr (arg2);
9164 /* If all arguments are constant, and the value of len is not greater
9165 than the lengths of arg1 and arg2, evaluate at compile-time. */
9166 if (host_integerp (len, 1) && p1 && p2
9167 && compare_tree_int (len, strlen (p1) + 1) <= 0
9168 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9170 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9172 if (r > 0)
9173 return integer_one_node;
9174 else if (r < 0)
9175 return integer_minus_one_node;
9176 else
9177 return integer_zero_node;
9180 /* If len parameter is one, return an expression corresponding to
9181 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9182 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9184 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9185 tree cst_uchar_ptr_node
9186 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9188 tree ind1 = fold_convert (integer_type_node,
9189 build1 (INDIRECT_REF, cst_uchar_node,
9190 fold_convert (cst_uchar_ptr_node,
9191 arg1)));
9192 tree ind2 = fold_convert (integer_type_node,
9193 build1 (INDIRECT_REF, cst_uchar_node,
9194 fold_convert (cst_uchar_ptr_node,
9195 arg2)));
9196 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9199 return NULL_TREE;
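/* Illustrative examples of the folds above (names are hypothetical):
   memcmp (p, p, n) folds to 0 while still evaluating N for side effects,
   and memcmp (a, b, 1) folds to
	*(const unsigned char *) a - *(const unsigned char *) b.  */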
9202 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9203 Return NULL_TREE if no simplification can be made. */
9205 static tree
9206 fold_builtin_strcmp (tree arg1, tree arg2)
9208 const char *p1, *p2;
9210 if (!validate_arg (arg1, POINTER_TYPE)
9211 || !validate_arg (arg2, POINTER_TYPE))
9212 return NULL_TREE;
9214 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9215 if (operand_equal_p (arg1, arg2, 0))
9216 return integer_zero_node;
9218 p1 = c_getstr (arg1);
9219 p2 = c_getstr (arg2);
9221 if (p1 && p2)
9223 const int i = strcmp (p1, p2);
9224 if (i < 0)
9225 return integer_minus_one_node;
9226 else if (i > 0)
9227 return integer_one_node;
9228 else
9229 return integer_zero_node;
9232 /* If the second arg is "", return *(const unsigned char*)arg1. */
9233 if (p2 && *p2 == '\0')
9235 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9236 tree cst_uchar_ptr_node
9237 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9239 return fold_convert (integer_type_node,
9240 build1 (INDIRECT_REF, cst_uchar_node,
9241 fold_convert (cst_uchar_ptr_node,
9242 arg1)));
9245 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9246 if (p1 && *p1 == '\0')
9248 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9249 tree cst_uchar_ptr_node
9250 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9252 tree temp = fold_convert (integer_type_node,
9253 build1 (INDIRECT_REF, cst_uchar_node,
9254 fold_convert (cst_uchar_ptr_node,
9255 arg2)));
9256 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9259 return NULL_TREE;
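/* Illustrative examples (arguments are hypothetical):
   strcmp ("abc", "abd") folds to the constant -1, and strcmp (s, "")
   folds to *(const unsigned char *) s. */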
9262 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9263 Return NULL_TREE if no simplification can be made. */
9265 static tree
9266 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9268 const char *p1, *p2;
9270 if (!validate_arg (arg1, POINTER_TYPE)
9271 || !validate_arg (arg2, POINTER_TYPE)
9272 || !validate_arg (len, INTEGER_TYPE))
9273 return NULL_TREE;
9275 /* If the LEN parameter is zero, return zero. */
9276 if (integer_zerop (len))
9277 return omit_two_operands (integer_type_node, integer_zero_node,
9278 arg1, arg2);
9280 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9281 if (operand_equal_p (arg1, arg2, 0))
9282 return omit_one_operand (integer_type_node, integer_zero_node, len);
9284 p1 = c_getstr (arg1);
9285 p2 = c_getstr (arg2);
9287 if (host_integerp (len, 1) && p1 && p2)
9289 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9290 if (i > 0)
9291 return integer_one_node;
9292 else if (i < 0)
9293 return integer_minus_one_node;
9294 else
9295 return integer_zero_node;
9298 /* If the second arg is "", and the length is greater than zero,
9299 return *(const unsigned char*)arg1. */
9300 if (p2 && *p2 == '\0'
9301 && TREE_CODE (len) == INTEGER_CST
9302 && tree_int_cst_sgn (len) == 1)
9304 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9305 tree cst_uchar_ptr_node
9306 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9308 return fold_convert (integer_type_node,
9309 build1 (INDIRECT_REF, cst_uchar_node,
9310 fold_convert (cst_uchar_ptr_node,
9311 arg1)));
9314 /* If the first arg is "", and the length is greater than zero,
9315 return -*(const unsigned char*)arg2. */
9316 if (p1 && *p1 == '\0'
9317 && TREE_CODE (len) == INTEGER_CST
9318 && tree_int_cst_sgn (len) == 1)
9320 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9321 tree cst_uchar_ptr_node
9322 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9324 tree temp = fold_convert (integer_type_node,
9325 build1 (INDIRECT_REF, cst_uchar_node,
9326 fold_convert (cst_uchar_ptr_node,
9327 arg2)));
9328 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9331 /* If len parameter is one, return an expression corresponding to
9332 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9333 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9335 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9336 tree cst_uchar_ptr_node
9337 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9339 tree ind1 = fold_convert (integer_type_node,
9340 build1 (INDIRECT_REF, cst_uchar_node,
9341 fold_convert (cst_uchar_ptr_node,
9342 arg1)));
9343 tree ind2 = fold_convert (integer_type_node,
9344 build1 (INDIRECT_REF, cst_uchar_node,
9345 fold_convert (cst_uchar_ptr_node,
9346 arg2)));
9347 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9350 return NULL_TREE;
9353 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9354 ARG. Return NULL_TREE if no simplification can be made. */
9356 static tree
9357 fold_builtin_signbit (tree arg, tree type)
9359 tree temp;
9361 if (!validate_arg (arg, REAL_TYPE))
9362 return NULL_TREE;
9364 /* If ARG is a compile-time constant, determine the result. */
9365 if (TREE_CODE (arg) == REAL_CST
9366 && !TREE_OVERFLOW (arg))
9368 REAL_VALUE_TYPE c;
9370 c = TREE_REAL_CST (arg);
9371 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9372 return fold_convert (type, temp);
9375 /* If ARG is non-negative, the result is always zero. */
9376 if (tree_expr_nonnegative_p (arg))
9377 return omit_one_operand (type, integer_zero_node, arg);
9379 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9380 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9381 return fold_build2 (LT_EXPR, type, arg,
9382 build_real (TREE_TYPE (arg), dconst0));
9384 return NULL_TREE;
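/* Illustrative examples (X is a hypothetical operand): signbit (-2.5)
   folds to 1, signbit (fabs (x)) folds to 0 because the operand is known
   non-negative, and when X's format has no signed zeros signbit (x)
   folds to x < 0.0. */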
9387 /* Fold function call to builtin copysign, copysignf or copysignl with
9388 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9389 be made. */
9391 static tree
9392 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9394 tree tem;
9396 if (!validate_arg (arg1, REAL_TYPE)
9397 || !validate_arg (arg2, REAL_TYPE))
9398 return NULL_TREE;
9400 /* copysign(X,X) is X. */
9401 if (operand_equal_p (arg1, arg2, 0))
9402 return fold_convert (type, arg1);
9404 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9405 if (TREE_CODE (arg1) == REAL_CST
9406 && TREE_CODE (arg2) == REAL_CST
9407 && !TREE_OVERFLOW (arg1)
9408 && !TREE_OVERFLOW (arg2))
9410 REAL_VALUE_TYPE c1, c2;
9412 c1 = TREE_REAL_CST (arg1);
9413 c2 = TREE_REAL_CST (arg2);
9414 /* c1.sign := c2.sign. */
9415 real_copysign (&c1, &c2);
9416 return build_real (type, c1);
9419 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9420 Remember to evaluate Y for side-effects. */
9421 if (tree_expr_nonnegative_p (arg2))
9422 return omit_one_operand (type,
9423 fold_build1 (ABS_EXPR, type, arg1),
9424 arg2);
9426 /* Strip sign changing operations for the first argument. */
9427 tem = fold_strip_sign_ops (arg1);
9428 if (tem)
9429 return build_call_expr (fndecl, 2, tem, arg2);
9431 return NULL_TREE;
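/* Illustrative examples (arguments are hypothetical):
   copysign (-2.0, -0.5) is constant-folded to -2.0, and copysign (x, 3.0)
   folds to fabs (x) because the second argument is known non-negative. */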
9434 /* Fold a call to builtin isascii with argument ARG. */
9436 static tree
9437 fold_builtin_isascii (tree arg)
9439 if (!validate_arg (arg, INTEGER_TYPE))
9440 return NULL_TREE;
9441 else
9443 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9444 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9445 build_int_cst (NULL_TREE,
9446 ~ (unsigned HOST_WIDE_INT) 0x7f));
9447 return fold_build2 (EQ_EXPR, integer_type_node,
9448 arg, integer_zero_node);
9452 /* Fold a call to builtin toascii with argument ARG. */
9454 static tree
9455 fold_builtin_toascii (tree arg)
9457 if (!validate_arg (arg, INTEGER_TYPE))
9458 return NULL_TREE;
9460 /* Transform toascii(c) -> (c & 0x7f). */
9461 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9462 build_int_cst (NULL_TREE, 0x7f));
9465 /* Fold a call to builtin isdigit with argument ARG. */
9467 static tree
9468 fold_builtin_isdigit (tree arg)
9470 if (!validate_arg (arg, INTEGER_TYPE))
9471 return NULL_TREE;
9472 else
9474 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9475 /* According to the C standard, isdigit is unaffected by locale.
9476 However, it definitely is affected by the target character set. */
9477 unsigned HOST_WIDE_INT target_digit0
9478 = lang_hooks.to_target_charset ('0');
9480 if (target_digit0 == 0)
9481 return NULL_TREE;
9483 arg = fold_convert (unsigned_type_node, arg);
9484 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9485 build_int_cst (unsigned_type_node, target_digit0));
9486 return fold_build2 (LE_EXPR, integer_type_node, arg,
9487 build_int_cst (unsigned_type_node, 9));
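/* Illustrative example for an ASCII target, where '0' maps to 48:
   isdigit (c) becomes (unsigned) c - 48 <= 9.  For c == '7' the
   subtraction yields 7, so the result is 1; for c == ' ' (32) it wraps
   around to a huge unsigned value, so the result is 0.  A single
   unsigned comparison therefore covers both bounds. */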
9491 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9493 static tree
9494 fold_builtin_fabs (tree arg, tree type)
9496 if (!validate_arg (arg, REAL_TYPE))
9497 return NULL_TREE;
9499 arg = fold_convert (type, arg);
9500 if (TREE_CODE (arg) == REAL_CST)
9501 return fold_abs_const (arg, type);
9502 return fold_build1 (ABS_EXPR, type, arg);
9505 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9507 static tree
9508 fold_builtin_abs (tree arg, tree type)
9510 if (!validate_arg (arg, INTEGER_TYPE))
9511 return NULL_TREE;
9513 arg = fold_convert (type, arg);
9514 if (TREE_CODE (arg) == INTEGER_CST)
9515 return fold_abs_const (arg, type);
9516 return fold_build1 (ABS_EXPR, type, arg);
9519 /* Fold a call to builtin fmin or fmax. */
9521 static tree
9522 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9524 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9526 /* Calculate the result when the argument is a constant. */
9527 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9529 if (res)
9530 return res;
9532 /* If either argument is NaN, return the other one. Avoid the
9533 transformation if we get (and honor) a signalling NaN. Using
9534 omit_one_operand() ensures we create a non-lvalue. */
9535 if (TREE_CODE (arg0) == REAL_CST
9536 && real_isnan (&TREE_REAL_CST (arg0))
9537 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9538 || ! TREE_REAL_CST (arg0).signalling))
9539 return omit_one_operand (type, arg1, arg0);
9540 if (TREE_CODE (arg1) == REAL_CST
9541 && real_isnan (&TREE_REAL_CST (arg1))
9542 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9543 || ! TREE_REAL_CST (arg1).signalling))
9544 return omit_one_operand (type, arg0, arg1);
9546 /* Transform fmin/fmax(x,x) -> x. */
9547 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9548 return omit_one_operand (type, arg0, arg1);
9550 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9551 functions to return the numeric arg if the other one is NaN.
9552 These tree codes don't honor that, so only transform if
9553 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9554 handled, so we don't have to worry about it either. */
9555 if (flag_finite_math_only)
9556 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9557 fold_convert (type, arg0),
9558 fold_convert (type, arg1));
9560 return NULL_TREE;
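/* Illustrative examples (X, A and B are hypothetical operands):
   fmin (x, x) folds to x; fmax (x, __builtin_nan ("")) folds to x once
   the quiet-NaN argument has itself been folded to a REAL_CST; and under
   -ffinite-math-only, fmin (a, b) becomes a MIN_EXPR. */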
9563 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9565 static tree
9566 fold_builtin_carg (tree arg, tree type)
9568 if (validate_arg (arg, COMPLEX_TYPE))
9570 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9572 if (atan2_fn)
9574 tree new_arg = builtin_save_expr (arg);
9575 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9576 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9577 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9581 return NULL_TREE;
9584 /* Fold a call to builtin logb/ilogb. */
9586 static tree
9587 fold_builtin_logb (tree arg, tree rettype)
9589 if (! validate_arg (arg, REAL_TYPE))
9590 return NULL_TREE;
9592 STRIP_NOPS (arg);
9594 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9596 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9598 switch (value->cl)
9600 case rvc_nan:
9601 case rvc_inf:
9602 /* If arg is Inf or NaN and we're logb, return it. */
9603 if (TREE_CODE (rettype) == REAL_TYPE)
9604 return fold_convert (rettype, arg);
9605 /* Fall through... */
9606 case rvc_zero:
9607 /* Zero may set errno and/or raise an exception for logb; also,
9608 for ilogb we don't know FP_ILOGB0. */
9609 return NULL_TREE;
9610 case rvc_normal:
9611 /* For normal numbers, proceed iff radix == 2. In GCC,
9612 normalized significands are in the range [0.5, 1.0). We
9613 want the exponent as if they were [1.0, 2.0) so get the
9614 exponent and subtract 1. */
9615 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9616 return fold_convert (rettype, build_int_cst (NULL_TREE,
9617 REAL_EXP (value)-1));
9618 break;
9622 return NULL_TREE;
9625 /* Fold a call to builtin significand, if radix == 2. */
9627 static tree
9628 fold_builtin_significand (tree arg, tree rettype)
9630 if (! validate_arg (arg, REAL_TYPE))
9631 return NULL_TREE;
9633 STRIP_NOPS (arg);
9635 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9637 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9639 switch (value->cl)
9641 case rvc_zero:
9642 case rvc_nan:
9643 case rvc_inf:
9644 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9645 return fold_convert (rettype, arg);
9646 case rvc_normal:
9647 /* For normal numbers, proceed iff radix == 2. */
9648 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9650 REAL_VALUE_TYPE result = *value;
9651 /* In GCC, normalized significands are in the range [0.5,
9652 1.0). We want them to be [1.0, 2.0) so set the
9653 exponent to 1. */
9654 SET_REAL_EXP (&result, 1);
9655 return build_real (rettype, result);
9657 break;
9661 return NULL_TREE;
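/* Illustrative example: 12.0 is represented internally as 0.75 * 2**4,
   so forcing the exponent to 1 yields 1.5, and significand (12.0) is
   folded to the constant 1.5 (indeed 12.0 == 1.5 * 2**3). */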
9664 /* Fold a call to builtin frexp. We can assume the base is 2. */
9666 static tree
9667 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9669 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9670 return NULL_TREE;
9672 STRIP_NOPS (arg0);
9674 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9675 return NULL_TREE;
9677 arg1 = build_fold_indirect_ref (arg1);
9679 /* Proceed if a valid pointer type was passed in. */
9680 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9682 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9683 tree frac, exp;
9685 switch (value->cl)
9687 case rvc_zero:
9688 /* For +-0, return (*exp = 0, +-0). */
9689 exp = integer_zero_node;
9690 frac = arg0;
9691 break;
9692 case rvc_nan:
9693 case rvc_inf:
9694 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9695 return omit_one_operand (rettype, arg0, arg1);
9696 case rvc_normal:
9698 /* Since the frexp function always expects base 2, and in
9699 GCC normalized significands are already in the range
9700 [0.5, 1.0), we have exactly what frexp wants. */
9701 REAL_VALUE_TYPE frac_rvt = *value;
9702 SET_REAL_EXP (&frac_rvt, 0);
9703 frac = build_real (rettype, frac_rvt);
9704 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9706 break;
9707 default:
9708 gcc_unreachable ();
9711 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9712 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9713 TREE_SIDE_EFFECTS (arg1) = 1;
9714 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9717 return NULL_TREE;
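/* Illustrative example (E is a hypothetical int variable): 12.0 is
   stored as 0.75 * 2**4, so frexp (12.0, &e) is folded to the
   COMPOUND_EXPR (e = 4, 0.75). */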
9720 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9721 then we can assume the base is two. If it's false, then we have to
9722 check the mode of the TYPE parameter in certain cases. */
9724 static tree
9725 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9727 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9729 STRIP_NOPS (arg0);
9730 STRIP_NOPS (arg1);
9732 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9733 if (real_zerop (arg0) || integer_zerop (arg1)
9734 || (TREE_CODE (arg0) == REAL_CST
9735 && !real_isfinite (&TREE_REAL_CST (arg0))))
9736 return omit_one_operand (type, arg0, arg1);
9738 /* If both arguments are constant, then try to evaluate it. */
9739 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9740 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9741 && host_integerp (arg1, 0))
9743 /* Bound the maximum adjustment to twice the range of the
9744 mode's valid exponents. Use abs to ensure the range is
9745 positive as a sanity check. */
9746 const long max_exp_adj = 2 *
9747 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9748 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9750 /* Get the user-requested adjustment. */
9751 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9753 /* The requested adjustment must be inside this range. This
9754 is a preliminary cap to avoid things like overflow; we
9755 may still fail to compute the result for other reasons. */
9756 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9758 REAL_VALUE_TYPE initial_result;
9760 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9762 /* Ensure we didn't overflow. */
9763 if (! real_isinf (&initial_result))
9765 const REAL_VALUE_TYPE trunc_result
9766 = real_value_truncate (TYPE_MODE (type), initial_result);
9768 /* Only proceed if the target mode can hold the
9769 resulting value. */
9770 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9771 return build_real (type, trunc_result);
9777 return NULL_TREE;
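/* Illustrative examples (arguments are hypothetical): ldexp (0.75, 4)
   is constant-folded to 12.0, ldexp (x, 0) folds to x, and a requested
   adjustment outside +-(2 * (emax - emin)) is left for the runtime
   library to handle. */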
9780 /* Fold a call to builtin modf. */
9782 static tree
9783 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9785 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9786 return NULL_TREE;
9788 STRIP_NOPS (arg0);
9790 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9791 return NULL_TREE;
9793 arg1 = build_fold_indirect_ref (arg1);
9795 /* Proceed if a valid pointer type was passed in. */
9796 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9798 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9799 REAL_VALUE_TYPE trunc, frac;
9801 switch (value->cl)
9803 case rvc_nan:
9804 case rvc_zero:
9805 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9806 trunc = frac = *value;
9807 break;
9808 case rvc_inf:
9809 /* For +-Inf, return (*arg1 = arg0, +-0). */
9810 frac = dconst0;
9811 frac.sign = value->sign;
9812 trunc = *value;
9813 break;
9814 case rvc_normal:
9815 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9816 real_trunc (&trunc, VOIDmode, value);
9817 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9818 /* If the original number was negative and already
9819 integral, then the fractional part is -0.0. */
9820 if (value->sign && frac.cl == rvc_zero)
9821 frac.sign = value->sign;
9822 break;
9825 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9826 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9827 build_real (rettype, trunc));
9828 TREE_SIDE_EFFECTS (arg1) = 1;
9829 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9830 build_real (rettype, frac));
9833 return NULL_TREE;
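/* Illustrative examples (IP is a hypothetical double variable):
   modf (2.5, &ip) folds to the COMPOUND_EXPR (ip = 2.0, 0.5), and
   modf (-3.0, &ip) folds to (ip = -3.0, -0.0) because the value is
   already integral and negative. */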
9836 /* Fold a call to __builtin_isnan (), __builtin_isinf () or __builtin_finite ().
9837 ARG is the argument for the call. */
9839 static tree
9840 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9842 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9843 REAL_VALUE_TYPE r;
9845 if (!validate_arg (arg, REAL_TYPE))
9846 return NULL_TREE;
9848 switch (builtin_index)
9850 case BUILT_IN_ISINF:
9851 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9852 return omit_one_operand (type, integer_zero_node, arg);
9854 if (TREE_CODE (arg) == REAL_CST)
9856 r = TREE_REAL_CST (arg);
9857 if (real_isinf (&r))
9858 return real_compare (GT_EXPR, &r, &dconst0)
9859 ? integer_one_node : integer_minus_one_node;
9860 else
9861 return integer_zero_node;
9864 return NULL_TREE;
9866 case BUILT_IN_ISINF_SIGN:
9868 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9869 /* In a boolean context, GCC will fold the inner COND_EXPR to
9870 1. So e.g. "if (isinf_sign(x))" would be folded to just
9871 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9872 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9873 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9874 tree tmp = NULL_TREE;
9876 arg = builtin_save_expr (arg);
9878 if (signbit_fn && isinf_fn)
9880 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9881 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9883 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9884 signbit_call, integer_zero_node);
9885 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9886 isinf_call, integer_zero_node);
9888 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9889 integer_minus_one_node, integer_one_node);
9890 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9891 integer_zero_node);
9894 return tmp;
9897 case BUILT_IN_ISFINITE:
9898 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9899 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9900 return omit_one_operand (type, integer_one_node, arg);
9902 if (TREE_CODE (arg) == REAL_CST)
9904 r = TREE_REAL_CST (arg);
9905 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9908 return NULL_TREE;
9910 case BUILT_IN_ISNAN:
9911 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9912 return omit_one_operand (type, integer_zero_node, arg);
9914 if (TREE_CODE (arg) == REAL_CST)
9916 r = TREE_REAL_CST (arg);
9917 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9920 arg = builtin_save_expr (arg);
9921 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9923 default:
9924 gcc_unreachable ();
9928 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9929 This builtin will generate code to return the appropriate floating
9930 point classification depending on the value of the floating point
9931 number passed in. The possible return values must be supplied as
9932 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9933 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9934 one floating point argument which is "type generic". */
9936 static tree
9937 fold_builtin_fpclassify (tree exp)
9939 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9940 arg, type, res, tmp;
9941 enum machine_mode mode;
9942 REAL_VALUE_TYPE r;
9943 char buf[128];
9945 /* Verify the required arguments in the original call. */
9946 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9947 INTEGER_TYPE, INTEGER_TYPE,
9948 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9949 return NULL_TREE;
9951 fp_nan = CALL_EXPR_ARG (exp, 0);
9952 fp_infinite = CALL_EXPR_ARG (exp, 1);
9953 fp_normal = CALL_EXPR_ARG (exp, 2);
9954 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9955 fp_zero = CALL_EXPR_ARG (exp, 4);
9956 arg = CALL_EXPR_ARG (exp, 5);
9957 type = TREE_TYPE (arg);
9958 mode = TYPE_MODE (type);
9959 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9961 /* fpclassify(x) ->
9962 isnan(x) ? FP_NAN :
9963 (fabs(x) == Inf ? FP_INFINITE :
9964 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9965 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9967 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9968 build_real (type, dconst0));
9969 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
9971 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9972 real_from_string (&r, buf);
9973 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9974 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
9976 if (HONOR_INFINITIES (mode))
9978 real_inf (&r);
9979 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9980 build_real (type, r));
9981 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
9984 if (HONOR_NANS (mode))
9986 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
9987 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
9990 return res;
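/* Illustrative note: for IEEE double, REAL_MODE_FORMAT (mode)->emin is
   -1021, so the string written above is "0x1p-1022", i.e. DBL_MIN; the
   fabs(x) >= DBL_MIN test in the expansion sketched in the comment
   above is what separates FP_NORMAL from the subnormal and zero cases. */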
9993 /* Fold a call to an unordered comparison function such as
9994 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9995 being called and ARG0 and ARG1 are the arguments for the call.
9996 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9997 the opposite of the desired result. UNORDERED_CODE is used
9998 for modes that can hold NaNs and ORDERED_CODE is used for
9999 the rest. */
10001 static tree
10002 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
10003 enum tree_code unordered_code,
10004 enum tree_code ordered_code)
10006 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10007 enum tree_code code;
10008 tree type0, type1;
10009 enum tree_code code0, code1;
10010 tree cmp_type = NULL_TREE;
10012 type0 = TREE_TYPE (arg0);
10013 type1 = TREE_TYPE (arg1);
10015 code0 = TREE_CODE (type0);
10016 code1 = TREE_CODE (type1);
10018 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10019 /* Choose the wider of two real types. */
10020 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10021 ? type0 : type1;
10022 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10023 cmp_type = type0;
10024 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10025 cmp_type = type1;
10027 arg0 = fold_convert (cmp_type, arg0);
10028 arg1 = fold_convert (cmp_type, arg1);
10030 if (unordered_code == UNORDERED_EXPR)
10032 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10033 return omit_two_operands (type, integer_zero_node, arg0, arg1);
10034 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
10037 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10038 : ordered_code;
10039 return fold_build1 (TRUTH_NOT_EXPR, type,
10040 fold_build2 (code, type, arg0, arg1));
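/* Illustrative example: isgreater (x, y) is folded to !(x UNLE y) when
   the comparison mode honors NaNs (the unordered case must yield 0),
   and to the plain !(x <= y) otherwise. */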
10043 /* Fold a call to built-in function FNDECL with 0 arguments.
10044 IGNORE is true if the result of the function call is ignored. This
10045 function returns NULL_TREE if no simplification was possible. */
10047 static tree
10048 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10050 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10051 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10052 switch (fcode)
10054 CASE_FLT_FN (BUILT_IN_INF):
10055 case BUILT_IN_INFD32:
10056 case BUILT_IN_INFD64:
10057 case BUILT_IN_INFD128:
10058 return fold_builtin_inf (type, true);
10060 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10061 return fold_builtin_inf (type, false);
10063 case BUILT_IN_CLASSIFY_TYPE:
10064 return fold_builtin_classify_type (NULL_TREE);
10066 default:
10067 break;
10069 return NULL_TREE;
10072 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10073 IGNORE is true if the result of the function call is ignored. This
10074 function returns NULL_TREE if no simplification was possible. */
10076 static tree
10077 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
10079 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10080 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10081 switch (fcode)
10084 case BUILT_IN_CONSTANT_P:
10086 tree val = fold_builtin_constant_p (arg0);
10088 /* Gimplification will pull the CALL_EXPR for the builtin out of
10089 an if condition. When not optimizing, we'll not CSE it back.
10090 To avoid regressions such as link errors, return false now. */
10091 if (!val && !optimize)
10092 val = integer_zero_node;
10094 return val;
10097 case BUILT_IN_CLASSIFY_TYPE:
10098 return fold_builtin_classify_type (arg0);
10100 case BUILT_IN_STRLEN:
10101 return fold_builtin_strlen (arg0);
10103 CASE_FLT_FN (BUILT_IN_FABS):
10104 return fold_builtin_fabs (arg0, type);
10106 case BUILT_IN_ABS:
10107 case BUILT_IN_LABS:
10108 case BUILT_IN_LLABS:
10109 case BUILT_IN_IMAXABS:
10110 return fold_builtin_abs (arg0, type);
10112 CASE_FLT_FN (BUILT_IN_CONJ):
10113 if (validate_arg (arg0, COMPLEX_TYPE))
10114 return fold_build1 (CONJ_EXPR, type, arg0);
10115 break;
10117 CASE_FLT_FN (BUILT_IN_CREAL):
10118 if (validate_arg (arg0, COMPLEX_TYPE))
10119 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
10120 break;
10122 CASE_FLT_FN (BUILT_IN_CIMAG):
10123 if (validate_arg (arg0, COMPLEX_TYPE))
10124 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
10125 break;
10127 CASE_FLT_FN (BUILT_IN_CCOS):
10128 CASE_FLT_FN (BUILT_IN_CCOSH):
10129 /* These functions are "even", i.e. f(x) == f(-x). */
10130 if (validate_arg (arg0, COMPLEX_TYPE))
10132 tree narg = fold_strip_sign_ops (arg0);
10133 if (narg)
10134 return build_call_expr (fndecl, 1, narg);
10136 break;
10138 CASE_FLT_FN (BUILT_IN_CABS):
10139 return fold_builtin_cabs (arg0, type, fndecl);
10141 CASE_FLT_FN (BUILT_IN_CARG):
10142 return fold_builtin_carg (arg0, type);
10144 CASE_FLT_FN (BUILT_IN_SQRT):
10145 return fold_builtin_sqrt (arg0, type);
10147 CASE_FLT_FN (BUILT_IN_CBRT):
10148 return fold_builtin_cbrt (arg0, type);
10150 CASE_FLT_FN (BUILT_IN_ASIN):
10151 if (validate_arg (arg0, REAL_TYPE))
10152 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10153 &dconstm1, &dconst1, true);
10154 break;
10156 CASE_FLT_FN (BUILT_IN_ACOS):
10157 if (validate_arg (arg0, REAL_TYPE))
10158 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10159 &dconstm1, &dconst1, true);
10160 break;
10162 CASE_FLT_FN (BUILT_IN_ATAN):
10163 if (validate_arg (arg0, REAL_TYPE))
10164 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10165 break;
10167 CASE_FLT_FN (BUILT_IN_ASINH):
10168 if (validate_arg (arg0, REAL_TYPE))
10169 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10170 break;
10172 CASE_FLT_FN (BUILT_IN_ACOSH):
10173 if (validate_arg (arg0, REAL_TYPE))
10174 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10175 &dconst1, NULL, true);
10176 break;
10178 CASE_FLT_FN (BUILT_IN_ATANH):
10179 if (validate_arg (arg0, REAL_TYPE))
10180 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10181 &dconstm1, &dconst1, false);
10182 break;
10184 CASE_FLT_FN (BUILT_IN_SIN):
10185 if (validate_arg (arg0, REAL_TYPE))
10186 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10187 break;
10189 CASE_FLT_FN (BUILT_IN_COS):
10190 return fold_builtin_cos (arg0, type, fndecl);
10191 break;
10193 CASE_FLT_FN (BUILT_IN_TAN):
10194 return fold_builtin_tan (arg0, type);
10196 CASE_FLT_FN (BUILT_IN_CEXP):
10197 return fold_builtin_cexp (arg0, type);
10199 CASE_FLT_FN (BUILT_IN_CEXPI):
10200 if (validate_arg (arg0, REAL_TYPE))
10201 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10202 break;
10204 CASE_FLT_FN (BUILT_IN_SINH):
10205 if (validate_arg (arg0, REAL_TYPE))
10206 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10207 break;
10209 CASE_FLT_FN (BUILT_IN_COSH):
10210 return fold_builtin_cosh (arg0, type, fndecl);
10212 CASE_FLT_FN (BUILT_IN_TANH):
10213 if (validate_arg (arg0, REAL_TYPE))
10214 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10215 break;
10217 CASE_FLT_FN (BUILT_IN_ERF):
10218 if (validate_arg (arg0, REAL_TYPE))
10219 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10220 break;
10222 CASE_FLT_FN (BUILT_IN_ERFC):
10223 if (validate_arg (arg0, REAL_TYPE))
10224 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10225 break;
10227 CASE_FLT_FN (BUILT_IN_TGAMMA):
10228 if (validate_arg (arg0, REAL_TYPE))
10229 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10230 break;
10232 CASE_FLT_FN (BUILT_IN_EXP):
10233 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10235 CASE_FLT_FN (BUILT_IN_EXP2):
10236 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10238 CASE_FLT_FN (BUILT_IN_EXP10):
10239 CASE_FLT_FN (BUILT_IN_POW10):
10240 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10242 CASE_FLT_FN (BUILT_IN_EXPM1):
10243 if (validate_arg (arg0, REAL_TYPE))
10244 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10245 break;
10247 CASE_FLT_FN (BUILT_IN_LOG):
10248 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10250 CASE_FLT_FN (BUILT_IN_LOG2):
10251 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10253 CASE_FLT_FN (BUILT_IN_LOG10):
10254 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10256 CASE_FLT_FN (BUILT_IN_LOG1P):
10257 if (validate_arg (arg0, REAL_TYPE))
10258 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10259 &dconstm1, NULL, false);
10260 break;
10262 CASE_FLT_FN (BUILT_IN_J0):
10263 if (validate_arg (arg0, REAL_TYPE))
10264 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10265 NULL, NULL, 0);
10266 break;
10268 CASE_FLT_FN (BUILT_IN_J1):
10269 if (validate_arg (arg0, REAL_TYPE))
10270 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10271 NULL, NULL, 0);
10272 break;
10274 CASE_FLT_FN (BUILT_IN_Y0):
10275 if (validate_arg (arg0, REAL_TYPE))
10276 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10277 &dconst0, NULL, false);
10278 break;
10280 CASE_FLT_FN (BUILT_IN_Y1):
10281 if (validate_arg (arg0, REAL_TYPE))
10282 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10283 &dconst0, NULL, false);
10284 break;
10286 CASE_FLT_FN (BUILT_IN_NAN):
10287 case BUILT_IN_NAND32:
10288 case BUILT_IN_NAND64:
10289 case BUILT_IN_NAND128:
10290 return fold_builtin_nan (arg0, type, true);
10292 CASE_FLT_FN (BUILT_IN_NANS):
10293 return fold_builtin_nan (arg0, type, false);
10295 CASE_FLT_FN (BUILT_IN_FLOOR):
10296 return fold_builtin_floor (fndecl, arg0);
10298 CASE_FLT_FN (BUILT_IN_CEIL):
10299 return fold_builtin_ceil (fndecl, arg0);
10301 CASE_FLT_FN (BUILT_IN_TRUNC):
10302 return fold_builtin_trunc (fndecl, arg0);
10304 CASE_FLT_FN (BUILT_IN_ROUND):
10305 return fold_builtin_round (fndecl, arg0);
10307 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10308 CASE_FLT_FN (BUILT_IN_RINT):
10309 return fold_trunc_transparent_mathfn (fndecl, arg0);
10311 CASE_FLT_FN (BUILT_IN_LCEIL):
10312 CASE_FLT_FN (BUILT_IN_LLCEIL):
10313 CASE_FLT_FN (BUILT_IN_LFLOOR):
10314 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10315 CASE_FLT_FN (BUILT_IN_LROUND):
10316 CASE_FLT_FN (BUILT_IN_LLROUND):
10317 return fold_builtin_int_roundingfn (fndecl, arg0);
10319 CASE_FLT_FN (BUILT_IN_LRINT):
10320 CASE_FLT_FN (BUILT_IN_LLRINT):
10321 return fold_fixed_mathfn (fndecl, arg0);
10323 case BUILT_IN_BSWAP32:
10324 case BUILT_IN_BSWAP64:
10325 return fold_builtin_bswap (fndecl, arg0);
10327 CASE_INT_FN (BUILT_IN_FFS):
10328 CASE_INT_FN (BUILT_IN_CLZ):
10329 CASE_INT_FN (BUILT_IN_CTZ):
10330 CASE_INT_FN (BUILT_IN_POPCOUNT):
10331 CASE_INT_FN (BUILT_IN_PARITY):
10332 return fold_builtin_bitop (fndecl, arg0);
10334 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10335 return fold_builtin_signbit (arg0, type);
10337 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10338 return fold_builtin_significand (arg0, type);
10340 CASE_FLT_FN (BUILT_IN_ILOGB):
10341 CASE_FLT_FN (BUILT_IN_LOGB):
10342 return fold_builtin_logb (arg0, type);
10344 case BUILT_IN_ISASCII:
10345 return fold_builtin_isascii (arg0);
10347 case BUILT_IN_TOASCII:
10348 return fold_builtin_toascii (arg0);
10350 case BUILT_IN_ISDIGIT:
10351 return fold_builtin_isdigit (arg0);
10353 CASE_FLT_FN (BUILT_IN_FINITE):
10354 case BUILT_IN_FINITED32:
10355 case BUILT_IN_FINITED64:
10356 case BUILT_IN_FINITED128:
10357 case BUILT_IN_ISFINITE:
10358 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10360 CASE_FLT_FN (BUILT_IN_ISINF):
10361 case BUILT_IN_ISINFD32:
10362 case BUILT_IN_ISINFD64:
10363 case BUILT_IN_ISINFD128:
10364 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10366 case BUILT_IN_ISINF_SIGN:
10367 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10369 CASE_FLT_FN (BUILT_IN_ISNAN):
10370 case BUILT_IN_ISNAND32:
10371 case BUILT_IN_ISNAND64:
10372 case BUILT_IN_ISNAND128:
10373 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10375 case BUILT_IN_PRINTF:
10376 case BUILT_IN_PRINTF_UNLOCKED:
10377 case BUILT_IN_VPRINTF:
10378 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10380 default:
10381 break;
10384 return NULL_TREE;
10388 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10389 IGNORE is true if the result of the function call is ignored. This
10390 function returns NULL_TREE if no simplification was possible. */
10392 static tree
10393 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10395 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10396 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10398 switch (fcode)
10400 CASE_FLT_FN (BUILT_IN_JN):
10401 if (validate_arg (arg0, INTEGER_TYPE)
10402 && validate_arg (arg1, REAL_TYPE))
10403 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10404 break;
10406 CASE_FLT_FN (BUILT_IN_YN):
10407 if (validate_arg (arg0, INTEGER_TYPE)
10408 && validate_arg (arg1, REAL_TYPE))
10409 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10410 &dconst0, false);
10411 break;
10413 CASE_FLT_FN (BUILT_IN_DREM):
10414 CASE_FLT_FN (BUILT_IN_REMAINDER):
10415 if (validate_arg (arg0, REAL_TYPE)
10416 && validate_arg (arg1, REAL_TYPE))
10417 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10418 break;
10420 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10421 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10422 if (validate_arg (arg0, REAL_TYPE)
10423 && validate_arg (arg1, POINTER_TYPE))
10424 return do_mpfr_lgamma_r (arg0, arg1, type);
10425 break;
10427 CASE_FLT_FN (BUILT_IN_ATAN2):
10428 if (validate_arg (arg0, REAL_TYPE)
10429 && validate_arg (arg1, REAL_TYPE))
10430 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10431 break;
10433 CASE_FLT_FN (BUILT_IN_FDIM):
10434 if (validate_arg (arg0, REAL_TYPE)
10435 && validate_arg (arg1, REAL_TYPE))
10436 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10437 break;
10439 CASE_FLT_FN (BUILT_IN_HYPOT):
10440 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10442 CASE_FLT_FN (BUILT_IN_LDEXP):
10443 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10444 CASE_FLT_FN (BUILT_IN_SCALBN):
10445 CASE_FLT_FN (BUILT_IN_SCALBLN):
10446 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10448 CASE_FLT_FN (BUILT_IN_FREXP):
10449 return fold_builtin_frexp (arg0, arg1, type);
10451 CASE_FLT_FN (BUILT_IN_MODF):
10452 return fold_builtin_modf (arg0, arg1, type);
10454 case BUILT_IN_BZERO:
10455 return fold_builtin_bzero (arg0, arg1, ignore);
10457 case BUILT_IN_FPUTS:
10458 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10460 case BUILT_IN_FPUTS_UNLOCKED:
10461 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10463 case BUILT_IN_STRSTR:
10464 return fold_builtin_strstr (arg0, arg1, type);
10466 case BUILT_IN_STRCAT:
10467 return fold_builtin_strcat (arg0, arg1);
10469 case BUILT_IN_STRSPN:
10470 return fold_builtin_strspn (arg0, arg1);
10472 case BUILT_IN_STRCSPN:
10473 return fold_builtin_strcspn (arg0, arg1);
10475 case BUILT_IN_STRCHR:
10476 case BUILT_IN_INDEX:
10477 return fold_builtin_strchr (arg0, arg1, type);
10479 case BUILT_IN_STRRCHR:
10480 case BUILT_IN_RINDEX:
10481 return fold_builtin_strrchr (arg0, arg1, type);
10483 case BUILT_IN_STRCPY:
10484 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10486 case BUILT_IN_STPCPY:
10487 if (ignore)
10489 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10490 if (!fn)
10491 break;
10493 return build_call_expr (fn, 2, arg0, arg1);
10495 break;
10497 case BUILT_IN_STRCMP:
10498 return fold_builtin_strcmp (arg0, arg1);
10500 case BUILT_IN_STRPBRK:
10501 return fold_builtin_strpbrk (arg0, arg1, type);
10503 case BUILT_IN_EXPECT:
10504 return fold_builtin_expect (arg0, arg1);
10506 CASE_FLT_FN (BUILT_IN_POW):
10507 return fold_builtin_pow (fndecl, arg0, arg1, type);
10509 CASE_FLT_FN (BUILT_IN_POWI):
10510 return fold_builtin_powi (fndecl, arg0, arg1, type);
10512 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10513 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10515 CASE_FLT_FN (BUILT_IN_FMIN):
10516 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10518 CASE_FLT_FN (BUILT_IN_FMAX):
10519 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10521 case BUILT_IN_ISGREATER:
10522 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10523 case BUILT_IN_ISGREATEREQUAL:
10524 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10525 case BUILT_IN_ISLESS:
10526 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10527 case BUILT_IN_ISLESSEQUAL:
10528 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10529 case BUILT_IN_ISLESSGREATER:
10530 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10531 case BUILT_IN_ISUNORDERED:
10532 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10533 NOP_EXPR);
10535 /* We do the folding for va_start in the expander. */
10536 case BUILT_IN_VA_START:
10537 break;
10539 case BUILT_IN_SPRINTF:
10540 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10542 case BUILT_IN_OBJECT_SIZE:
10543 return fold_builtin_object_size (arg0, arg1);
10545 case BUILT_IN_PRINTF:
10546 case BUILT_IN_PRINTF_UNLOCKED:
10547 case BUILT_IN_VPRINTF:
10548 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10550 case BUILT_IN_PRINTF_CHK:
10551 case BUILT_IN_VPRINTF_CHK:
10552 if (!validate_arg (arg0, INTEGER_TYPE)
10553 || TREE_SIDE_EFFECTS (arg0))
10554 return NULL_TREE;
10555 else
10556 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10557 break;
10559 case BUILT_IN_FPRINTF:
10560 case BUILT_IN_FPRINTF_UNLOCKED:
10561 case BUILT_IN_VFPRINTF:
10562 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10563 ignore, fcode);
10565 default:
10566 break;
10568 return NULL_TREE;
10571 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10572 and ARG2. IGNORE is true if the result of the function call is ignored.
10573 This function returns NULL_TREE if no simplification was possible. */
10575 static tree
10576 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10578 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10579 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10580 switch (fcode)
10583 CASE_FLT_FN (BUILT_IN_SINCOS):
10584 return fold_builtin_sincos (arg0, arg1, arg2);
10586 CASE_FLT_FN (BUILT_IN_FMA):
10587 if (validate_arg (arg0, REAL_TYPE)
10588 && validate_arg (arg1, REAL_TYPE)
10589 && validate_arg (arg2, REAL_TYPE))
10590 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10591 break;
10593 CASE_FLT_FN (BUILT_IN_REMQUO):
10594 if (validate_arg (arg0, REAL_TYPE)
10595 && validate_arg (arg1, REAL_TYPE)
10596 && validate_arg (arg2, POINTER_TYPE))
10597 return do_mpfr_remquo (arg0, arg1, arg2);
10598 break;
10600 case BUILT_IN_MEMSET:
10601 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10603 case BUILT_IN_BCOPY:
10604 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10606 case BUILT_IN_MEMCPY:
10607 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10609 case BUILT_IN_MEMPCPY:
10610 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10612 case BUILT_IN_MEMMOVE:
10613 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10615 case BUILT_IN_STRNCAT:
10616 return fold_builtin_strncat (arg0, arg1, arg2);
10618 case BUILT_IN_STRNCPY:
10619 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10621 case BUILT_IN_STRNCMP:
10622 return fold_builtin_strncmp (arg0, arg1, arg2);
10624 case BUILT_IN_MEMCHR:
10625 return fold_builtin_memchr (arg0, arg1, arg2, type);
10627 case BUILT_IN_BCMP:
10628 case BUILT_IN_MEMCMP:
10629 return fold_builtin_memcmp (arg0, arg1, arg2);
10631 case BUILT_IN_SPRINTF:
10632 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10634 case BUILT_IN_STRCPY_CHK:
10635 case BUILT_IN_STPCPY_CHK:
10636 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10637 ignore, fcode);
10639 case BUILT_IN_STRCAT_CHK:
10640 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10642 case BUILT_IN_PRINTF_CHK:
10643 case BUILT_IN_VPRINTF_CHK:
10644 if (!validate_arg (arg0, INTEGER_TYPE)
10645 || TREE_SIDE_EFFECTS (arg0))
10646 return NULL_TREE;
10647 else
10648 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10649 break;
10651 case BUILT_IN_FPRINTF:
10652 case BUILT_IN_FPRINTF_UNLOCKED:
10653 case BUILT_IN_VFPRINTF:
10654 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10656 case BUILT_IN_FPRINTF_CHK:
10657 case BUILT_IN_VFPRINTF_CHK:
10658 if (!validate_arg (arg1, INTEGER_TYPE)
10659 || TREE_SIDE_EFFECTS (arg1))
10660 return NULL_TREE;
10661 else
10662 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10663 ignore, fcode);
10665 default:
10666 break;
10668 return NULL_TREE;
10671 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10672 ARG2, and ARG3. IGNORE is true if the result of the function call is
10673 ignored. This function returns NULL_TREE if no simplification was
10674 possible. */
10676 static tree
10677 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10678 bool ignore)
10680 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10682 switch (fcode)
10684 case BUILT_IN_MEMCPY_CHK:
10685 case BUILT_IN_MEMPCPY_CHK:
10686 case BUILT_IN_MEMMOVE_CHK:
10687 case BUILT_IN_MEMSET_CHK:
10688 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10689 NULL_TREE, ignore,
10690 DECL_FUNCTION_CODE (fndecl));
10692 case BUILT_IN_STRNCPY_CHK:
10693 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10695 case BUILT_IN_STRNCAT_CHK:
10696 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10698 case BUILT_IN_FPRINTF_CHK:
10699 case BUILT_IN_VFPRINTF_CHK:
10700 if (!validate_arg (arg1, INTEGER_TYPE)
10701 || TREE_SIDE_EFFECTS (arg1))
10702 return NULL_TREE;
10703 else
10704 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10705 ignore, fcode);
10706 break;
10708 default:
10709 break;
10711 return NULL_TREE;
10714 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10715 arguments, where NARGS <= 4. IGNORE is true if the result of the
10716 function call is ignored. This function returns NULL_TREE if no
10717 simplification was possible. Note that this only folds builtins with
10718 fixed argument patterns. Foldings that do varargs-to-varargs
10719 transformations, or that match calls with more than 4 arguments,
10720 need to be handled with fold_builtin_varargs instead. */
10722 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10724 static tree
10725 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10727 tree ret = NULL_TREE;
10729 switch (nargs)
10731 case 0:
10732 ret = fold_builtin_0 (fndecl, ignore);
10733 break;
10734 case 1:
10735 ret = fold_builtin_1 (fndecl, args[0], ignore);
10736 break;
10737 case 2:
10738 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10739 break;
10740 case 3:
10741 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10742 break;
10743 case 4:
10744 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10745 ignore);
10746 break;
10747 default:
10748 break;
10750 if (ret)
10752 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10753 TREE_NO_WARNING (ret) = 1;
10754 return ret;
10756 return NULL_TREE;
10759 /* Builtins with folding operations that operate on "..." arguments
10760 need special handling; we need to store the arguments in a convenient
10761 data structure before attempting any folding. Fortunately there are
10762 only a few builtins that fall into this category. FNDECL is the
10763 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10764 result of the function call is ignored. */
10766 static tree
10767 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10769 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10770 tree ret = NULL_TREE;
10772 switch (fcode)
10774 case BUILT_IN_SPRINTF_CHK:
10775 case BUILT_IN_VSPRINTF_CHK:
10776 ret = fold_builtin_sprintf_chk (exp, fcode);
10777 break;
10779 case BUILT_IN_SNPRINTF_CHK:
10780 case BUILT_IN_VSNPRINTF_CHK:
10781 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10782 break;
10784 case BUILT_IN_FPCLASSIFY:
10785 ret = fold_builtin_fpclassify (exp);
10786 break;
10788 default:
10789 break;
10791 if (ret)
10793 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10794 TREE_NO_WARNING (ret) = 1;
10795 return ret;
10797 return NULL_TREE;
10800 /* A wrapper function for builtin folding that prevents warnings for
10801 "statement without effect" and the like, caused by removing the
10802 call node earlier than the warning is generated. */
10804 tree
10805 fold_call_expr (tree exp, bool ignore)
10807 tree ret = NULL_TREE;
10808 tree fndecl = get_callee_fndecl (exp);
10809 if (fndecl
10810 && TREE_CODE (fndecl) == FUNCTION_DECL
10811 && DECL_BUILT_IN (fndecl)
10812 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10813 yet. Defer folding until we see all the arguments
10814 (after inlining). */
10815 && !CALL_EXPR_VA_ARG_PACK (exp))
10817 int nargs = call_expr_nargs (exp);
10819 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10820 instead last argument is __builtin_va_arg_pack (). Defer folding
10821 even in that case, until arguments are finalized. */
10822 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10824 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10825 if (fndecl2
10826 && TREE_CODE (fndecl2) == FUNCTION_DECL
10827 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10828 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10829 return NULL_TREE;
10832 /* FIXME: Don't use a list in this interface. */
10833 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10834 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10835 else
10837 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10839 tree *args = CALL_EXPR_ARGP (exp);
10840 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10842 if (!ret)
10843 ret = fold_builtin_varargs (fndecl, exp, ignore);
10844 if (ret)
10846 /* Propagate location information from original call to
10847 expansion of builtin. Otherwise things like
10848 maybe_emit_chk_warning, that operate on the expansion
10849 of a builtin, will use the wrong location information. */
10850 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10852 tree realret = ret;
10853 if (TREE_CODE (ret) == NOP_EXPR)
10854 realret = TREE_OPERAND (ret, 0);
10855 if (CAN_HAVE_LOCATION_P (realret)
10856 && !EXPR_HAS_LOCATION (realret))
10857 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10858 return realret;
10860 return ret;
10864 return NULL_TREE;
10867 /* Conveniently construct a function call expression. FNDECL names the
10868 function to be called and ARGLIST is a TREE_LIST of arguments. */
10870 tree
10871 build_function_call_expr (tree fndecl, tree arglist)
10873 tree fntype = TREE_TYPE (fndecl);
10874 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10875 int n = list_length (arglist);
10876 tree *argarray = (tree *) alloca (n * sizeof (tree));
10877 int i;
10879 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10880 argarray[i] = TREE_VALUE (arglist);
10881 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10884 /* Conveniently construct a function call expression. FNDECL names the
10885 function to be called, N is the number of arguments, and the "..."
10886 parameters are the argument expressions. */
10888 tree
10889 build_call_expr (tree fndecl, int n, ...)
10891 va_list ap;
10892 tree fntype = TREE_TYPE (fndecl);
10893 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10894 tree *argarray = (tree *) alloca (n * sizeof (tree));
10895 int i;
10897 va_start (ap, n);
10898 for (i = 0; i < n; i++)
10899 argarray[i] = va_arg (ap, tree);
10900 va_end (ap);
10901 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
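/* Illustrative use, as seen in the string folders above:
	build_call_expr (fn, 3, dest, src, len)
   builds (and immediately tries to fold) a call to FN with the three
   given argument trees. */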
10904 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10905 N arguments are passed in the array ARGARRAY. */
10907 tree
10908 fold_builtin_call_array (tree type,
10909 tree fn,
10910 int n,
10911 tree *argarray)
10913 tree ret = NULL_TREE;
10914 int i;
10915 tree exp;
10917 if (TREE_CODE (fn) == ADDR_EXPR)
10919 tree fndecl = TREE_OPERAND (fn, 0);
10920 if (TREE_CODE (fndecl) == FUNCTION_DECL
10921 && DECL_BUILT_IN (fndecl))
10923 /* If last argument is __builtin_va_arg_pack (), arguments to this
10924 function are not finalized yet. Defer folding until they are. */
10925 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10927 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10928 if (fndecl2
10929 && TREE_CODE (fndecl2) == FUNCTION_DECL
10930 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10931 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10932 return build_call_array (type, fn, n, argarray);
10934 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10936 tree arglist = NULL_TREE;
10937 for (i = n - 1; i >= 0; i--)
10938 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10939 ret = targetm.fold_builtin (fndecl, arglist, false);
10940 if (ret)
10941 return ret;
10943 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10945 /* First try the transformations that don't require consing up
10946 an exp. */
10947 ret = fold_builtin_n (fndecl, argarray, n, false);
10948 if (ret)
10949 return ret;
10952 /* If we got this far, we need to build an exp. */
10953 exp = build_call_array (type, fn, n, argarray);
10954 ret = fold_builtin_varargs (fndecl, exp, false);
10955 return ret ? ret : exp;
10959 return build_call_array (type, fn, n, argarray);
10962 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10963 along with N new arguments specified as the "..." parameters. SKIP
10964 is the number of arguments in EXP to be omitted. This function is used
10965 to do varargs-to-varargs transformations. */
10967 static tree
10968 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10970 int oldnargs = call_expr_nargs (exp);
10971 int nargs = oldnargs - skip + n;
10972 tree fntype = TREE_TYPE (fndecl);
10973 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10974 tree *buffer;
10976 if (n > 0)
10978 int i, j;
10979 va_list ap;
10981 buffer = XALLOCAVEC (tree, nargs);
10982 va_start (ap, n);
10983 for (i = 0; i < n; i++)
10984 buffer[i] = va_arg (ap, tree);
10985 va_end (ap);
10986 for (j = skip; j < oldnargs; j++, i++)
10987 buffer[i] = CALL_EXPR_ARG (exp, j);
10989 else
10990 buffer = CALL_EXPR_ARGP (exp) + skip;
10992 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
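/* Illustrative example (purely hypothetical arguments): with skip == 2
   and n == 1, rewrite_call_expr (exp, 2, fndecl, 1, newarg) builds a
   call to FNDECL whose arguments are NEWARG followed by all but the
   first two arguments of EXP.  This is the mechanism the *_chk folders
   in this file use to forward a call to its unchecked counterpart. */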
10995 /* Validate a single argument ARG against a tree code CODE representing
10996 a type. */
10998 static bool
10999 validate_arg (const_tree arg, enum tree_code code)
11001 if (!arg)
11002 return false;
11003 else if (code == POINTER_TYPE)
11004 return POINTER_TYPE_P (TREE_TYPE (arg));
11005 else if (code == INTEGER_TYPE)
11006 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11007 return code == TREE_CODE (TREE_TYPE (arg));
11010 /* This function validates the types of a function call argument list
11011 against a specified list of tree_codes. If the last specifier is a 0,
11012 that represents an ellipsis; otherwise the last specifier must be a
11013 VOID_TYPE.
11015 This is the GIMPLE version of validate_arglist. Eventually we want to
11016 completely convert builtins.c to work from GIMPLEs and the tree based
11017 validate_arglist will then be removed. */
11019 bool
11020 validate_gimple_arglist (const_gimple call, ...)
11022 enum tree_code code;
11023 bool res = 0;
11024 va_list ap;
11025 const_tree arg;
11026 size_t i;
11028 va_start (ap, call);
11029 i = 0;
11033 code = va_arg (ap, enum tree_code);
11034 switch (code)
11036 case 0:
11037 /* This signifies an ellipsis; any further arguments are all OK. */
11038 res = true;
11039 goto end;
11040 case VOID_TYPE:
11041 /* This signifies an endlink; if no arguments remain, return
11042 true, otherwise return false. */
11043 res = (i == gimple_call_num_args (call));
11044 goto end;
11045 default:
11046 /* If no parameters remain or the parameter's code does not
11047 match the specified code, return false. Otherwise continue
11048 checking any remaining arguments. */
11049 arg = gimple_call_arg (call, i++);
11050 if (!validate_arg (arg, code))
11051 goto end;
11052 break;
11055 while (1);
11057 /* We need gotos here since we can only have one VA_CLOSE in a
11058 function. */
11059 end: ;
11060 va_end (ap);
11062 return res;
11065 /* This function validates the types of a function call argument list
11066 against a specified list of tree_codes. If the last specifier is a 0,
11067 that represents an ellipsis; otherwise the last specifier must be a
11068 VOID_TYPE. */
11070 bool
11071 validate_arglist (const_tree callexpr, ...)
11073 enum tree_code code;
11074 bool res = 0;
11075 va_list ap;
11076 const_call_expr_arg_iterator iter;
11077 const_tree arg;
11079 va_start (ap, callexpr);
11080 init_const_call_expr_arg_iterator (callexpr, &iter);
11084 code = va_arg (ap, enum tree_code);
11085 switch (code)
11087 case 0:
11088 /* This signifies an ellipsis; any further arguments are all ok. */
11089 res = true;
11090 goto end;
11091 case VOID_TYPE:
11092 /* This signifies an endlink, if no arguments remain, return
11093 true, otherwise return false. */
11094 res = !more_const_call_expr_args_p (&iter);
11095 goto end;
11096 default:
11097 /* If no parameters remain or the parameter's code does not
11098 match the specified code, return false. Otherwise continue
11099 checking any remaining arguments. */
11100 arg = next_const_call_expr_arg (&iter);
11101 if (!validate_arg (arg, code))
11102 goto end;
11103 break;
11106 while (1);
11108 /* We need gotos here since we can only have one VA_CLOSE in a
11109 function. */
11110 end: ;
11111 va_end (ap);
11113 return res;
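/* Usage sketch (illustrative): a builtin taking a pointer and an integer,
   such as a strchr-style call, is typically checked with

     if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
       return NULL_RTX;

   while a builtin with trailing "..." arguments ends the specifier list
   with 0 instead of VOID_TYPE.  */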
11116 /* Default target-specific builtin expander that does nothing. */
11118 rtx
11119 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11120 rtx target ATTRIBUTE_UNUSED,
11121 rtx subtarget ATTRIBUTE_UNUSED,
11122 enum machine_mode mode ATTRIBUTE_UNUSED,
11123 int ignore ATTRIBUTE_UNUSED)
11125 return NULL_RTX;
11128 /* Returns true if EXP represents data that would potentially reside
11129 in a readonly section. */
11131 static bool
11132 readonly_data_expr (tree exp)
11134 STRIP_NOPS (exp);
11136 if (TREE_CODE (exp) != ADDR_EXPR)
11137 return false;
11139 exp = get_base_address (TREE_OPERAND (exp, 0));
11140 if (!exp)
11141 return false;
11143 /* Make sure we call decl_readonly_section only for trees it
11144 can handle (since it returns true for everything it doesn't
11145 understand). */
11146 if (TREE_CODE (exp) == STRING_CST
11147 || TREE_CODE (exp) == CONSTRUCTOR
11148 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11149 return decl_readonly_section (exp, 0);
11150 else
11151 return false;
11154 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11155 to the call, and TYPE is its return type.
11157 Return NULL_TREE if no simplification was possible, otherwise return the
11158 simplified form of the call as a tree.
11160 The simplified form may be a constant or other expression which
11161 computes the same value, but in a more efficient manner (including
11162 calls to other builtin functions).
11164 The call may contain arguments which need to be evaluated, but
11165 which are not useful to determine the result of the call. In
11166 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11167 COMPOUND_EXPR will be an argument which must be evaluated.
11168 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11169 COMPOUND_EXPR in the chain will contain the tree for the simplified
11170 form of the builtin function call. */
11172 static tree
11173 fold_builtin_strstr (tree s1, tree s2, tree type)
11175 if (!validate_arg (s1, POINTER_TYPE)
11176 || !validate_arg (s2, POINTER_TYPE))
11177 return NULL_TREE;
11178 else
11180 tree fn;
11181 const char *p1, *p2;
11183 p2 = c_getstr (s2);
11184 if (p2 == NULL)
11185 return NULL_TREE;
11187 p1 = c_getstr (s1);
11188 if (p1 != NULL)
11190 const char *r = strstr (p1, p2);
11191 tree tem;
11193 if (r == NULL)
11194 return build_int_cst (TREE_TYPE (s1), 0);
11196 /* Return an offset into the constant string argument. */
11197 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11198 s1, size_int (r - p1));
11199 return fold_convert (type, tem);
11202 /* The argument is const char *, and the result is char *, so we need
11203 a type conversion here to avoid a warning. */
11204 if (p2[0] == '\0')
11205 return fold_convert (type, s1);
11207 if (p2[1] != '\0')
11208 return NULL_TREE;
11210 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11211 if (!fn)
11212 return NULL_TREE;
11214 /* New argument list transforming strstr(s1, s2) to
11215 strchr(s1, s2[0]). */
11216 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
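/* For illustration (a sketch of the rewrites the folder above performs,
   assuming the strchr builtin decl is available):

     strstr (s, "")          ->  (char *) s
     strstr (s, "c")         ->  strchr (s, 'c')
     strstr ("hello", "ll")  ->  "hello" + 2    (folded at compile time)  */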
11220 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11221 the call, and TYPE is its return type.
11223 Return NULL_TREE if no simplification was possible, otherwise return the
11224 simplified form of the call as a tree.
11226 The simplified form may be a constant or other expression which
11227 computes the same value, but in a more efficient manner (including
11228 calls to other builtin functions).
11230 The call may contain arguments which need to be evaluated, but
11231 which are not useful to determine the result of the call. In
11232 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11233 COMPOUND_EXPR will be an argument which must be evaluated.
11234 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11235 COMPOUND_EXPR in the chain will contain the tree for the simplified
11236 form of the builtin function call. */
11238 static tree
11239 fold_builtin_strchr (tree s1, tree s2, tree type)
11241 if (!validate_arg (s1, POINTER_TYPE)
11242 || !validate_arg (s2, INTEGER_TYPE))
11243 return NULL_TREE;
11244 else
11246 const char *p1;
11248 if (TREE_CODE (s2) != INTEGER_CST)
11249 return NULL_TREE;
11251 p1 = c_getstr (s1);
11252 if (p1 != NULL)
11254 char c;
11255 const char *r;
11256 tree tem;
11258 if (target_char_cast (s2, &c))
11259 return NULL_TREE;
11261 r = strchr (p1, c);
11263 if (r == NULL)
11264 return build_int_cst (TREE_TYPE (s1), 0);
11266 /* Return an offset into the constant string argument. */
11267 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11268 s1, size_int (r - p1));
11269 return fold_convert (type, tem);
11271 return NULL_TREE;
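/* Illustrative examples of the strchr folding above, applicable when the
   first argument is a known string constant:

     strchr ("abcdef", 'd')  ->  "abcdef" + 3
     strchr ("abc", 'x')     ->  (char *) 0

   Non-constant strings are left for the library call.  */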
11275 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11276 the call, and TYPE is its return type.
11278 Return NULL_TREE if no simplification was possible, otherwise return the
11279 simplified form of the call as a tree.
11281 The simplified form may be a constant or other expression which
11282 computes the same value, but in a more efficient manner (including
11283 calls to other builtin functions).
11285 The call may contain arguments which need to be evaluated, but
11286 which are not useful to determine the result of the call. In
11287 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11288 COMPOUND_EXPR will be an argument which must be evaluated.
11289 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11290 COMPOUND_EXPR in the chain will contain the tree for the simplified
11291 form of the builtin function call. */
11293 static tree
11294 fold_builtin_strrchr (tree s1, tree s2, tree type)
11296 if (!validate_arg (s1, POINTER_TYPE)
11297 || !validate_arg (s2, INTEGER_TYPE))
11298 return NULL_TREE;
11299 else
11301 tree fn;
11302 const char *p1;
11304 if (TREE_CODE (s2) != INTEGER_CST)
11305 return NULL_TREE;
11307 p1 = c_getstr (s1);
11308 if (p1 != NULL)
11310 char c;
11311 const char *r;
11312 tree tem;
11314 if (target_char_cast (s2, &c))
11315 return NULL_TREE;
11317 r = strrchr (p1, c);
11319 if (r == NULL)
11320 return build_int_cst (TREE_TYPE (s1), 0);
11322 /* Return an offset into the constant string argument. */
11323 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11324 s1, size_int (r - p1));
11325 return fold_convert (type, tem);
11328 if (! integer_zerop (s2))
11329 return NULL_TREE;
11331 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11332 if (!fn)
11333 return NULL_TREE;
11335 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11336 return build_call_expr (fn, 2, s1, s2);
11340 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11341 to the call, and TYPE is its return type.
11343 Return NULL_TREE if no simplification was possible, otherwise return the
11344 simplified form of the call as a tree.
11346 The simplified form may be a constant or other expression which
11347 computes the same value, but in a more efficient manner (including
11348 calls to other builtin functions).
11350 The call may contain arguments which need to be evaluated, but
11351 which are not useful to determine the result of the call. In
11352 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11353 COMPOUND_EXPR will be an argument which must be evaluated.
11354 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11355 COMPOUND_EXPR in the chain will contain the tree for the simplified
11356 form of the builtin function call. */
11358 static tree
11359 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11361 if (!validate_arg (s1, POINTER_TYPE)
11362 || !validate_arg (s2, POINTER_TYPE))
11363 return NULL_TREE;
11364 else
11366 tree fn;
11367 const char *p1, *p2;
11369 p2 = c_getstr (s2);
11370 if (p2 == NULL)
11371 return NULL_TREE;
11373 p1 = c_getstr (s1);
11374 if (p1 != NULL)
11376 const char *r = strpbrk (p1, p2);
11377 tree tem;
11379 if (r == NULL)
11380 return build_int_cst (TREE_TYPE (s1), 0);
11382 /* Return an offset into the constant string argument. */
11383 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11384 s1, size_int (r - p1));
11385 return fold_convert (type, tem);
11388 if (p2[0] == '\0')
11389 /* strpbrk(x, "") == NULL.
11390 Evaluate and ignore s1 in case it had side-effects. */
11391 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11393 if (p2[1] != '\0')
11394 return NULL_TREE; /* Really call strpbrk. */
11396 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11397 if (!fn)
11398 return NULL_TREE;
11400 /* New argument list transforming strpbrk(s1, s2) to
11401 strchr(s1, s2[0]). */
11402 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
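/* Sketch of the strpbrk rewrites above (illustrative only):

     strpbrk (s, "")           ->  (char *) 0      (s still evaluated for
                                                    side effects)
     strpbrk (s, "c")          ->  strchr (s, 'c')
     strpbrk ("banana", "kn")  ->  "banana" + 2    (folded at compile time)  */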
11406 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11407 to the call.
11409 Return NULL_TREE if no simplification was possible, otherwise return the
11410 simplified form of the call as a tree.
11412 The simplified form may be a constant or other expression which
11413 computes the same value, but in a more efficient manner (including
11414 calls to other builtin functions).
11416 The call may contain arguments which need to be evaluated, but
11417 which are not useful to determine the result of the call. In
11418 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11419 COMPOUND_EXPR will be an argument which must be evaluated.
11420 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11421 COMPOUND_EXPR in the chain will contain the tree for the simplified
11422 form of the builtin function call. */
11424 static tree
11425 fold_builtin_strcat (tree dst, tree src)
11427 if (!validate_arg (dst, POINTER_TYPE)
11428 || !validate_arg (src, POINTER_TYPE))
11429 return NULL_TREE;
11430 else
11432 const char *p = c_getstr (src);
11434 /* If the string length is zero, return the dst parameter. */
11435 if (p && *p == '\0')
11436 return dst;
11438 return NULL_TREE;
11442 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11443 arguments to the call.
11445 Return NULL_TREE if no simplification was possible, otherwise return the
11446 simplified form of the call as a tree.
11448 The simplified form may be a constant or other expression which
11449 computes the same value, but in a more efficient manner (including
11450 calls to other builtin functions).
11452 The call may contain arguments which need to be evaluated, but
11453 which are not useful to determine the result of the call. In
11454 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11455 COMPOUND_EXPR will be an argument which must be evaluated.
11456 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11457 COMPOUND_EXPR in the chain will contain the tree for the simplified
11458 form of the builtin function call. */
11460 static tree
11461 fold_builtin_strncat (tree dst, tree src, tree len)
11463 if (!validate_arg (dst, POINTER_TYPE)
11464 || !validate_arg (src, POINTER_TYPE)
11465 || !validate_arg (len, INTEGER_TYPE))
11466 return NULL_TREE;
11467 else
11469 const char *p = c_getstr (src);
11471 /* If the requested length is zero, or the src parameter string
11472 length is zero, return the dst parameter. */
11473 if (integer_zerop (len) || (p && *p == '\0'))
11474 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11476 /* If the requested len is greater than or equal to the string
11477 length, call strcat. */
11478 if (TREE_CODE (len) == INTEGER_CST && p
11479 && compare_tree_int (len, strlen (p)) >= 0)
11481 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11483 /* If the replacement _DECL isn't initialized, don't do the
11484 transformation. */
11485 if (!fn)
11486 return NULL_TREE;
11488 return build_call_expr (fn, 2, dst, src);
11490 return NULL_TREE;
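/* Illustrative rewrites performed by the strcat/strncat folders above,
   assuming the strcat builtin decl is available:

     strcat (d, "")        ->  d
     strncat (d, s, 0)     ->  d          (s and the length still evaluated)
     strncat (d, "ab", 5)  ->  strcat (d, "ab")  */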
11494 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11495 to the call.
11497 Return NULL_TREE if no simplification was possible, otherwise return the
11498 simplified form of the call as a tree.
11500 The simplified form may be a constant or other expression which
11501 computes the same value, but in a more efficient manner (including
11502 calls to other builtin functions).
11504 The call may contain arguments which need to be evaluated, but
11505 which are not useful to determine the result of the call. In
11506 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11507 COMPOUND_EXPR will be an argument which must be evaluated.
11508 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11509 COMPOUND_EXPR in the chain will contain the tree for the simplified
11510 form of the builtin function call. */
11512 static tree
11513 fold_builtin_strspn (tree s1, tree s2)
11515 if (!validate_arg (s1, POINTER_TYPE)
11516 || !validate_arg (s2, POINTER_TYPE))
11517 return NULL_TREE;
11518 else
11520 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11522 /* If both arguments are constants, evaluate at compile-time. */
11523 if (p1 && p2)
11525 const size_t r = strspn (p1, p2);
11526 return size_int (r);
11529 /* If either argument is "", the result is zero. */
11530 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11531 /* Evaluate and ignore both arguments in case either one has
11532 side-effects. */
11533 return omit_two_operands (size_type_node, size_zero_node,
11534 s1, s2);
11535 return NULL_TREE;
11539 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11540 to the call.
11542 Return NULL_TREE if no simplification was possible, otherwise return the
11543 simplified form of the call as a tree.
11545 The simplified form may be a constant or other expression which
11546 computes the same value, but in a more efficient manner (including
11547 calls to other builtin functions).
11549 The call may contain arguments which need to be evaluated, but
11550 which are not useful to determine the result of the call. In
11551 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11552 COMPOUND_EXPR will be an argument which must be evaluated.
11553 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11554 COMPOUND_EXPR in the chain will contain the tree for the simplified
11555 form of the builtin function call. */
11557 static tree
11558 fold_builtin_strcspn (tree s1, tree s2)
11560 if (!validate_arg (s1, POINTER_TYPE)
11561 || !validate_arg (s2, POINTER_TYPE))
11562 return NULL_TREE;
11563 else
11565 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11567 /* If both arguments are constants, evaluate at compile-time. */
11568 if (p1 && p2)
11570 const size_t r = strcspn (p1, p2);
11571 return size_int (r);
11574 /* If the first argument is "", the result is zero. */
11575 if (p1 && *p1 == '\0')
11577 /* Evaluate and ignore argument s2 in case it has
11578 side-effects. */
11579 return omit_one_operand (size_type_node,
11580 size_zero_node, s2);
11583 /* If the second argument is "", return __builtin_strlen(s1). */
11584 if (p2 && *p2 == '\0')
11586 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11588 /* If the replacement _DECL isn't initialized, don't do the
11589 transformation. */
11590 if (!fn)
11591 return NULL_TREE;
11593 return build_call_expr (fn, 1, s1);
11595 return NULL_TREE;
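/* Illustrative examples for the strspn/strcspn folders above, assuming
   the strlen builtin decl is available:

     strspn ("aab", "ab")  ->  3     (both arguments constant)
     strspn (s, "")        ->  0     (s still evaluated)
     strcspn ("abc", "c")  ->  2
     strcspn (s, "")       ->  strlen (s)  */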
11599 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11600 to the call. IGNORE is true if the value returned
11601 by the builtin will be ignored. UNLOCKED is true if this is
11602 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11603 the known length of the string. Return NULL_TREE if no simplification
11604 was possible. */
11606 tree
11607 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11609 /* If we're using an unlocked function, assume the other unlocked
11610 functions exist explicitly. */
11611 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11612 : implicit_built_in_decls[BUILT_IN_FPUTC];
11613 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11614 : implicit_built_in_decls[BUILT_IN_FWRITE];
11616 /* If the return value is used, don't do the transformation. */
11617 if (!ignore)
11618 return NULL_TREE;
11620 /* Verify the arguments in the original call. */
11621 if (!validate_arg (arg0, POINTER_TYPE)
11622 || !validate_arg (arg1, POINTER_TYPE))
11623 return NULL_TREE;
11625 if (! len)
11626 len = c_strlen (arg0, 0);
11628 /* Get the length of the string passed to fputs. If the length
11629 can't be determined, punt. */
11630 if (!len
11631 || TREE_CODE (len) != INTEGER_CST)
11632 return NULL_TREE;
11634 switch (compare_tree_int (len, 1))
11636 case -1: /* length is 0, delete the call entirely. */
11637 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11639 case 0: /* length is 1, call fputc. */
11641 const char *p = c_getstr (arg0);
11643 if (p != NULL)
11645 if (fn_fputc)
11646 return build_call_expr (fn_fputc, 2,
11647 build_int_cst (NULL_TREE, p[0]), arg1);
11648 else
11649 return NULL_TREE;
11652 /* FALLTHROUGH */
11653 case 1: /* length is greater than 1, call fwrite. */
11655 /* If optimizing for size keep fputs. */
11656 if (optimize_function_for_size_p (cfun))
11657 return NULL_TREE;
11658 /* New argument list transforming fputs(string, stream) to
11659 fwrite(string, 1, len, stream). */
11660 if (fn_fwrite)
11661 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11662 else
11663 return NULL_TREE;
11665 default:
11666 gcc_unreachable ();
11668 return NULL_TREE;
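/* Sketch of the fputs rewrites above, applied only when the return value
   is ignored and the string length is known:

     fputs ("", f)     ->  (call deleted, f still evaluated)
     fputs ("x", f)    ->  fputc ('x', f)
     fputs ("abc", f)  ->  fwrite ("abc", 1, 3, f)   (skipped when
                                                      optimizing for size)  */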
11671 /* Fold the next_arg or va_start call EXP. Returns true if an error
11672 was produced, false otherwise. This is done so that we don't output
11673 the error or warning twice or three times. */
11675 bool
11676 fold_builtin_next_arg (tree exp, bool va_start_p)
11678 tree fntype = TREE_TYPE (current_function_decl);
11679 int nargs = call_expr_nargs (exp);
11680 tree arg;
11682 if (TYPE_ARG_TYPES (fntype) == 0
11683 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11684 == void_type_node))
11686 error ("%<va_start%> used in function with fixed args");
11687 return true;
11690 if (va_start_p)
11692 if (va_start_p && (nargs != 2))
11694 error ("wrong number of arguments to function %<va_start%>");
11695 return true;
11697 arg = CALL_EXPR_ARG (exp, 1);
11699 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11700 when we checked the arguments and if needed issued a warning. */
11701 else
11703 if (nargs == 0)
11705 /* Evidently an out of date version of <stdarg.h>; can't validate
11706 va_start's second argument, but can still work as intended. */
11707 warning (0, "%<__builtin_next_arg%> called without an argument");
11708 return true;
11710 else if (nargs > 1)
11712 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11713 return true;
11715 arg = CALL_EXPR_ARG (exp, 0);
11718 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11719 or __builtin_next_arg (0) the first time we see it, after checking
11720 the arguments and if needed issuing a warning. */
11721 if (!integer_zerop (arg))
11723 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11725 /* Strip off all nops for the sake of the comparison. This
11726 is not quite the same as STRIP_NOPS. It does more.
11727 We must also strip off INDIRECT_EXPR for C++ reference
11728 parameters. */
11729 while (CONVERT_EXPR_P (arg)
11730 || TREE_CODE (arg) == INDIRECT_REF)
11731 arg = TREE_OPERAND (arg, 0);
11732 if (arg != last_parm)
11734 /* FIXME: Sometimes with the tree optimizers we can end up with
11735 something other than the last argument even though the user used
11736 the last argument. We just warn and set the arg to be the last
11737 argument so that we will get wrong code because of
11738 it. */
11739 warning (0, "second parameter of %<va_start%> not last named argument");
11742 /* Undefined by C99 7.15.1.4p4 (va_start):
11743 "If the parameter parmN is declared with the register storage
11744 class, with a function or array type, or with a type that is
11745 not compatible with the type that results after application of
11746 the default argument promotions, the behavior is undefined."
11748 else if (DECL_REGISTER (arg))
11749 warning (0, "undefined behaviour when second parameter of "
11750 "%<va_start%> is declared with %<register%> storage");
11752 /* We want to verify the second parameter just once before the tree
11753 optimizers are run and then avoid keeping it in the tree,
11754 as otherwise we could warn even for correct code like:
11755 void foo (int i, ...)
11756 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11757 if (va_start_p)
11758 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11759 else
11760 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11762 return false;
11766 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11767 ORIG may be null if this is a 2-argument call. We don't attempt to
11768 simplify calls with more than 3 arguments.
11770 Return NULL_TREE if no simplification was possible, otherwise return the
11771 simplified form of the call as a tree. If IGNORED is true, it means that
11772 the caller does not use the returned value of the function. */
11774 static tree
11775 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11777 tree call, retval;
11778 const char *fmt_str = NULL;
11780 /* Verify the required arguments in the original call. We deal with two
11781 types of sprintf() calls: 'sprintf (str, fmt)' and
11782 'sprintf (dest, "%s", orig)'. */
11783 if (!validate_arg (dest, POINTER_TYPE)
11784 || !validate_arg (fmt, POINTER_TYPE))
11785 return NULL_TREE;
11786 if (orig && !validate_arg (orig, POINTER_TYPE))
11787 return NULL_TREE;
11789 /* Check whether the format is a literal string constant. */
11790 fmt_str = c_getstr (fmt);
11791 if (fmt_str == NULL)
11792 return NULL_TREE;
11794 call = NULL_TREE;
11795 retval = NULL_TREE;
11797 if (!init_target_chars ())
11798 return NULL_TREE;
11800 /* If the format doesn't contain % args or %%, use strcpy. */
11801 if (strchr (fmt_str, target_percent) == NULL)
11803 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11805 if (!fn)
11806 return NULL_TREE;
11808 /* Don't optimize sprintf (buf, "abc", ptr++). */
11809 if (orig)
11810 return NULL_TREE;
11812 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11813 'format' is known to contain no % formats. */
11814 call = build_call_expr (fn, 2, dest, fmt);
11815 if (!ignored)
11816 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11819 /* If the format is "%s", use strcpy if the result isn't used. */
11820 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11822 tree fn;
11823 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11825 if (!fn)
11826 return NULL_TREE;
11828 /* Don't crash on sprintf (str1, "%s"). */
11829 if (!orig)
11830 return NULL_TREE;
11832 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11833 if (!ignored)
11835 retval = c_strlen (orig, 1);
11836 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11837 return NULL_TREE;
11839 call = build_call_expr (fn, 2, dest, orig);
11842 if (call && retval)
11844 retval = fold_convert
11845 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11846 retval);
11847 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11849 else
11850 return call;
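/* Illustrative sprintf rewrites performed above:

     sprintf (d, "hello")  ->  strcpy (d, "hello")   with value 5 if used
     sprintf (d, "%s", s)  ->  strcpy (d, s)         when the return value
                                                     is ignored or the
                                                     length of S is a
                                                     known constant  */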
11853 /* Expand a call EXP to __builtin_object_size. */
11855 static rtx
11856 expand_builtin_object_size (tree exp)
11858 tree ost;
11859 int object_size_type;
11860 tree fndecl = get_callee_fndecl (exp);
11862 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11864 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11865 exp, fndecl);
11866 expand_builtin_trap ();
11867 return const0_rtx;
11870 ost = CALL_EXPR_ARG (exp, 1);
11871 STRIP_NOPS (ost);
11873 if (TREE_CODE (ost) != INTEGER_CST
11874 || tree_int_cst_sgn (ost) < 0
11875 || compare_tree_int (ost, 3) > 0)
11877 error ("%Klast argument of %D is not integer constant between 0 and 3",
11878 exp, fndecl);
11879 expand_builtin_trap ();
11880 return const0_rtx;
11883 object_size_type = tree_low_cst (ost, 0);
11885 return object_size_type < 2 ? constm1_rtx : const0_rtx;
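/* When __builtin_object_size survives to expansion time, i.e. the object
   size pass could not determine an answer, the conservative fallbacks
   produced above are, illustratively:

     __builtin_object_size (p, 0)  ->  (size_t) -1
     __builtin_object_size (p, 2)  ->  (size_t) 0   */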
11888 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11889 FCODE is the BUILT_IN_* to use.
11890 Return NULL_RTX if we failed; the caller should emit a normal call,
11891 otherwise try to get the result in TARGET, if convenient (and in
11892 mode MODE if that's convenient). */
11894 static rtx
11895 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11896 enum built_in_function fcode)
11898 tree dest, src, len, size;
11900 if (!validate_arglist (exp,
11901 POINTER_TYPE,
11902 fcode == BUILT_IN_MEMSET_CHK
11903 ? INTEGER_TYPE : POINTER_TYPE,
11904 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11905 return NULL_RTX;
11907 dest = CALL_EXPR_ARG (exp, 0);
11908 src = CALL_EXPR_ARG (exp, 1);
11909 len = CALL_EXPR_ARG (exp, 2);
11910 size = CALL_EXPR_ARG (exp, 3);
11912 if (! host_integerp (size, 1))
11913 return NULL_RTX;
11915 if (host_integerp (len, 1) || integer_all_onesp (size))
11917 tree fn;
11919 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11921 warning (0, "%Kcall to %D will always overflow destination buffer",
11922 exp, get_callee_fndecl (exp));
11923 return NULL_RTX;
11926 fn = NULL_TREE;
11927 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11928 mem{cpy,pcpy,move,set} is available. */
11929 switch (fcode)
11931 case BUILT_IN_MEMCPY_CHK:
11932 fn = built_in_decls[BUILT_IN_MEMCPY];
11933 break;
11934 case BUILT_IN_MEMPCPY_CHK:
11935 fn = built_in_decls[BUILT_IN_MEMPCPY];
11936 break;
11937 case BUILT_IN_MEMMOVE_CHK:
11938 fn = built_in_decls[BUILT_IN_MEMMOVE];
11939 break;
11940 case BUILT_IN_MEMSET_CHK:
11941 fn = built_in_decls[BUILT_IN_MEMSET];
11942 break;
11943 default:
11944 break;
11947 if (! fn)
11948 return NULL_RTX;
11950 fn = build_call_expr (fn, 3, dest, src, len);
11951 STRIP_TYPE_NOPS (fn);
11952 while (TREE_CODE (fn) == COMPOUND_EXPR)
11954 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11955 EXPAND_NORMAL);
11956 fn = TREE_OPERAND (fn, 1);
11958 if (TREE_CODE (fn) == CALL_EXPR)
11959 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11960 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11962 else if (fcode == BUILT_IN_MEMSET_CHK)
11963 return NULL_RTX;
11964 else
11966 unsigned int dest_align
11967 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11969 /* If DEST is not a pointer type, call the normal function. */
11970 if (dest_align == 0)
11971 return NULL_RTX;
11973 /* If SRC and DEST are the same (and not volatile), do nothing. */
11974 if (operand_equal_p (src, dest, 0))
11976 tree expr;
11978 if (fcode != BUILT_IN_MEMPCPY_CHK)
11980 /* Evaluate and ignore LEN in case it has side-effects. */
11981 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11982 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11985 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11986 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11989 /* __memmove_chk special case. */
11990 if (fcode == BUILT_IN_MEMMOVE_CHK)
11992 unsigned int src_align
11993 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11995 if (src_align == 0)
11996 return NULL_RTX;
11998 /* If src is categorized for a readonly section we can use
11999 normal __memcpy_chk. */
12000 if (readonly_data_expr (src))
12002 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12003 if (!fn)
12004 return NULL_RTX;
12005 fn = build_call_expr (fn, 4, dest, src, len, size);
12006 STRIP_TYPE_NOPS (fn);
12007 while (TREE_CODE (fn) == COMPOUND_EXPR)
12009 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12010 EXPAND_NORMAL);
12011 fn = TREE_OPERAND (fn, 1);
12013 if (TREE_CODE (fn) == CALL_EXPR)
12014 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12015 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12018 return NULL_RTX;
12022 /* Emit warning if a buffer overflow is detected at compile time. */
12024 static void
12025 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12027 int is_strlen = 0;
12028 tree len, size;
12030 switch (fcode)
12032 case BUILT_IN_STRCPY_CHK:
12033 case BUILT_IN_STPCPY_CHK:
12034 /* For __strcat_chk the warning will be emitted only if overflowing
12035 by at least strlen (dest) + 1 bytes. */
12036 case BUILT_IN_STRCAT_CHK:
12037 len = CALL_EXPR_ARG (exp, 1);
12038 size = CALL_EXPR_ARG (exp, 2);
12039 is_strlen = 1;
12040 break;
12041 case BUILT_IN_STRNCAT_CHK:
12042 case BUILT_IN_STRNCPY_CHK:
12043 len = CALL_EXPR_ARG (exp, 2);
12044 size = CALL_EXPR_ARG (exp, 3);
12045 break;
12046 case BUILT_IN_SNPRINTF_CHK:
12047 case BUILT_IN_VSNPRINTF_CHK:
12048 len = CALL_EXPR_ARG (exp, 1);
12049 size = CALL_EXPR_ARG (exp, 3);
12050 break;
12051 default:
12052 gcc_unreachable ();
12055 if (!len || !size)
12056 return;
12058 if (! host_integerp (size, 1) || integer_all_onesp (size))
12059 return;
12061 if (is_strlen)
12063 len = c_strlen (len, 1);
12064 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12065 return;
12067 else if (fcode == BUILT_IN_STRNCAT_CHK)
12069 tree src = CALL_EXPR_ARG (exp, 1);
12070 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12071 return;
12072 src = c_strlen (src, 1);
12073 if (! src || ! host_integerp (src, 1))
12075 warning (0, "%Kcall to %D might overflow destination buffer",
12076 exp, get_callee_fndecl (exp));
12077 return;
12079 else if (tree_int_cst_lt (src, size))
12080 return;
12082 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12083 return;
12085 warning (0, "%Kcall to %D will always overflow destination buffer",
12086 exp, get_callee_fndecl (exp));
12089 /* Emit warning if a buffer overflow is detected at compile time
12090 in __sprintf_chk/__vsprintf_chk calls. */
12092 static void
12093 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12095 tree dest, size, len, fmt, flag;
12096 const char *fmt_str;
12097 int nargs = call_expr_nargs (exp);
12099 /* Verify the required arguments in the original call. */
12101 if (nargs < 4)
12102 return;
12103 dest = CALL_EXPR_ARG (exp, 0);
12104 flag = CALL_EXPR_ARG (exp, 1);
12105 size = CALL_EXPR_ARG (exp, 2);
12106 fmt = CALL_EXPR_ARG (exp, 3);
12108 if (! host_integerp (size, 1) || integer_all_onesp (size))
12109 return;
12111 /* Check whether the format is a literal string constant. */
12112 fmt_str = c_getstr (fmt);
12113 if (fmt_str == NULL)
12114 return;
12116 if (!init_target_chars ())
12117 return;
12119 /* If the format doesn't contain % args or %%, we know its size. */
12120 if (strchr (fmt_str, target_percent) == 0)
12121 len = build_int_cstu (size_type_node, strlen (fmt_str));
12122 /* If the format is "%s" and first ... argument is a string literal,
12123 we know it too. */
12124 else if (fcode == BUILT_IN_SPRINTF_CHK
12125 && strcmp (fmt_str, target_percent_s) == 0)
12127 tree arg;
12129 if (nargs < 5)
12130 return;
12131 arg = CALL_EXPR_ARG (exp, 4);
12132 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12133 return;
12135 len = c_strlen (arg, 1);
12136 if (!len || ! host_integerp (len, 1))
12137 return;
12139 else
12140 return;
12142 if (! tree_int_cst_lt (len, size))
12144 warning (0, "%Kcall to %D will always overflow destination buffer",
12145 exp, get_callee_fndecl (exp));
12149 /* Emit warning if a free is called with address of a variable. */
12151 static void
12152 maybe_emit_free_warning (tree exp)
12154 tree arg = CALL_EXPR_ARG (exp, 0);
12156 STRIP_NOPS (arg);
12157 if (TREE_CODE (arg) != ADDR_EXPR)
12158 return;
12160 arg = get_base_address (TREE_OPERAND (arg, 0));
12161 if (arg == NULL || INDIRECT_REF_P (arg))
12162 return;
12164 if (SSA_VAR_P (arg))
12165 warning (0, "%Kattempt to free a non-heap object %qD", exp, arg);
12166 else
12167 warning (0, "%Kattempt to free a non-heap object", exp);
12170 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12171 if possible. */
12173 tree
12174 fold_builtin_object_size (tree ptr, tree ost)
12176 tree ret = NULL_TREE;
12177 int object_size_type;
12179 if (!validate_arg (ptr, POINTER_TYPE)
12180 || !validate_arg (ost, INTEGER_TYPE))
12181 return NULL_TREE;
12183 STRIP_NOPS (ost);
12185 if (TREE_CODE (ost) != INTEGER_CST
12186 || tree_int_cst_sgn (ost) < 0
12187 || compare_tree_int (ost, 3) > 0)
12188 return NULL_TREE;
12190 object_size_type = tree_low_cst (ost, 0);
12192 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12193 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12194 and (size_t) 0 for types 2 and 3. */
12195 if (TREE_SIDE_EFFECTS (ptr))
12196 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12198 if (TREE_CODE (ptr) == ADDR_EXPR)
12199 ret = build_int_cstu (size_type_node,
12200 compute_builtin_object_size (ptr, object_size_type));
12202 else if (TREE_CODE (ptr) == SSA_NAME)
12204 unsigned HOST_WIDE_INT bytes;
12206 /* If object size is not known yet, delay folding until
12207 later. Maybe subsequent passes will help determining
12208 it. */
12209 bytes = compute_builtin_object_size (ptr, object_size_type);
12210 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12211 ? -1 : 0))
12212 ret = build_int_cstu (size_type_node, bytes);
12215 if (ret)
12217 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12218 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12219 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12220 ret = NULL_TREE;
12223 return ret;
12226 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12227 DEST, SRC, LEN, and SIZE are the arguments to the call.
12228 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12229 code of the builtin. If MAXLEN is not NULL, it is maximum length
12230 passed as third argument. */
12232 tree
12233 fold_builtin_memory_chk (tree fndecl,
12234 tree dest, tree src, tree len, tree size,
12235 tree maxlen, bool ignore,
12236 enum built_in_function fcode)
12238 tree fn;
12240 if (!validate_arg (dest, POINTER_TYPE)
12241 || !validate_arg (src,
12242 (fcode == BUILT_IN_MEMSET_CHK
12243 ? INTEGER_TYPE : POINTER_TYPE))
12244 || !validate_arg (len, INTEGER_TYPE)
12245 || !validate_arg (size, INTEGER_TYPE))
12246 return NULL_TREE;
12248 /* If SRC and DEST are the same (and not volatile), return DEST
12249 (resp. DEST+LEN for __mempcpy_chk). */
12250 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12252 if (fcode != BUILT_IN_MEMPCPY_CHK)
12253 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12254 else
12256 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12257 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
12261 if (! host_integerp (size, 1))
12262 return NULL_TREE;
12264 if (! integer_all_onesp (size))
12266 if (! host_integerp (len, 1))
12268 /* If LEN is not constant, try MAXLEN too.
12269 For MAXLEN only allow optimizing into non-_ocs function
12270 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12271 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12273 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12275 /* (void) __mempcpy_chk () can be optimized into
12276 (void) __memcpy_chk (). */
12277 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12278 if (!fn)
12279 return NULL_TREE;
12281 return build_call_expr (fn, 4, dest, src, len, size);
12283 return NULL_TREE;
12286 else
12287 maxlen = len;
12289 if (tree_int_cst_lt (size, maxlen))
12290 return NULL_TREE;
12293 fn = NULL_TREE;
12294 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12295 mem{cpy,pcpy,move,set} is available. */
12296 switch (fcode)
12298 case BUILT_IN_MEMCPY_CHK:
12299 fn = built_in_decls[BUILT_IN_MEMCPY];
12300 break;
12301 case BUILT_IN_MEMPCPY_CHK:
12302 fn = built_in_decls[BUILT_IN_MEMPCPY];
12303 break;
12304 case BUILT_IN_MEMMOVE_CHK:
12305 fn = built_in_decls[BUILT_IN_MEMMOVE];
12306 break;
12307 case BUILT_IN_MEMSET_CHK:
12308 fn = built_in_decls[BUILT_IN_MEMSET];
12309 break;
12310 default:
12311 break;
12314 if (!fn)
12315 return NULL_TREE;
12317 return build_call_expr (fn, 3, dest, src, len);
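/* Sketch of the _chk folding above, assuming a compile-time object size:

     __memcpy_chk (d, s, 8, 16)   ->  memcpy (d, s, 8)
     __memcpy_chk (d, s, n, -1)   ->  memcpy (d, s, n)   (size unknown)
     __memcpy_chk (d, s, 32, 16)  is left alone, and warned about at
                                  expansion time.  */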
12320 /* Fold a call to the __st[rp]cpy_chk builtin.
12321 DEST, SRC, and SIZE are the arguments to the call.
12322 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12323 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12324 strings passed as second argument. */
12326 tree
12327 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12328 tree maxlen, bool ignore,
12329 enum built_in_function fcode)
12331 tree len, fn;
12333 if (!validate_arg (dest, POINTER_TYPE)
12334 || !validate_arg (src, POINTER_TYPE)
12335 || !validate_arg (size, INTEGER_TYPE))
12336 return NULL_TREE;
12338 /* If SRC and DEST are the same (and not volatile), return DEST. */
12339 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12340 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12342 if (! host_integerp (size, 1))
12343 return NULL_TREE;
12345 if (! integer_all_onesp (size))
12347 len = c_strlen (src, 1);
12348 if (! len || ! host_integerp (len, 1))
12350 /* If LEN is not constant, try MAXLEN too.
12351 For MAXLEN only allow optimizing into non-_ocs function
12352 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12353 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12355 if (fcode == BUILT_IN_STPCPY_CHK)
12357 if (! ignore)
12358 return NULL_TREE;
12360 /* If return value of __stpcpy_chk is ignored,
12361 optimize into __strcpy_chk. */
12362 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12363 if (!fn)
12364 return NULL_TREE;
12366 return build_call_expr (fn, 3, dest, src, size);
12369 if (! len || TREE_SIDE_EFFECTS (len))
12370 return NULL_TREE;
12372 /* If c_strlen returned something, but not a constant,
12373 transform __strcpy_chk into __memcpy_chk. */
12374 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12375 if (!fn)
12376 return NULL_TREE;
12378 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12379 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12380 build_call_expr (fn, 4,
12381 dest, src, len, size));
12384 else
12385 maxlen = len;
12387 if (! tree_int_cst_lt (maxlen, size))
12388 return NULL_TREE;
12391 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12392 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12393 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12394 if (!fn)
12395 return NULL_TREE;
12397 return build_call_expr (fn, 2, dest, src);
12400 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12401 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12402 length passed as third argument. */
12404 tree
12405 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12406 tree maxlen)
12408 tree fn;
12410 if (!validate_arg (dest, POINTER_TYPE)
12411 || !validate_arg (src, POINTER_TYPE)
12412 || !validate_arg (len, INTEGER_TYPE)
12413 || !validate_arg (size, INTEGER_TYPE))
12414 return NULL_TREE;
12416 if (! host_integerp (size, 1))
12417 return NULL_TREE;
12419 if (! integer_all_onesp (size))
12421 if (! host_integerp (len, 1))
12423 /* If LEN is not constant, try MAXLEN too.
12424 For MAXLEN only allow optimizing into non-_ocs function
12425 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12426 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12427 return NULL_TREE;
12429 else
12430 maxlen = len;
12432 if (tree_int_cst_lt (size, maxlen))
12433 return NULL_TREE;
12436 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12437 fn = built_in_decls[BUILT_IN_STRNCPY];
12438 if (!fn)
12439 return NULL_TREE;
12441 return build_call_expr (fn, 3, dest, src, len);
12444 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12445 are the arguments to the call. */
12447 static tree
12448 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12450 tree fn;
12451 const char *p;
12453 if (!validate_arg (dest, POINTER_TYPE)
12454 || !validate_arg (src, POINTER_TYPE)
12455 || !validate_arg (size, INTEGER_TYPE))
12456 return NULL_TREE;
12458 p = c_getstr (src);
12459 /* If the SRC parameter is "", return DEST. */
12460 if (p && *p == '\0')
12461 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12463 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12464 return NULL_TREE;
12466 /* If __builtin_strcat_chk is used, assume strcat is available. */
12467 fn = built_in_decls[BUILT_IN_STRCAT];
12468 if (!fn)
12469 return NULL_TREE;
12471 return build_call_expr (fn, 2, dest, src);
12474 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12475 LEN, and SIZE. */
12477 static tree
12478 fold_builtin_strncat_chk (tree fndecl,
12479 tree dest, tree src, tree len, tree size)
12481 tree fn;
12482 const char *p;
12484 if (!validate_arg (dest, POINTER_TYPE)
12485 || !validate_arg (src, POINTER_TYPE)
12486 || !validate_arg (size, INTEGER_TYPE)
12487 || !validate_arg (size, INTEGER_TYPE))
12488 return NULL_TREE;
12490 p = c_getstr (src);
12491 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12492 if (p && *p == '\0')
12493 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12494 else if (integer_zerop (len))
12495 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12497 if (! host_integerp (size, 1))
12498 return NULL_TREE;
12500 if (! integer_all_onesp (size))
12502 tree src_len = c_strlen (src, 1);
12503 if (src_len
12504 && host_integerp (src_len, 1)
12505 && host_integerp (len, 1)
12506 && ! tree_int_cst_lt (len, src_len))
12508 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12509 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12510 if (!fn)
12511 return NULL_TREE;
12513 return build_call_expr (fn, 3, dest, src, size);
12515 return NULL_TREE;
12518 /* If __builtin_strncat_chk is used, assume strncat is available. */
12519 fn = built_in_decls[BUILT_IN_STRNCAT];
12520 if (!fn)
12521 return NULL_TREE;
12523 return build_call_expr (fn, 3, dest, src, len);
12526 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12527 a normal call should be emitted rather than expanding the function
12528 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12530 static tree
12531 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12533 tree dest, size, len, fn, fmt, flag;
12534 const char *fmt_str;
12535 int nargs = call_expr_nargs (exp);
12537 /* Verify the required arguments in the original call. */
12538 if (nargs < 4)
12539 return NULL_TREE;
12540 dest = CALL_EXPR_ARG (exp, 0);
12541 if (!validate_arg (dest, POINTER_TYPE))
12542 return NULL_TREE;
12543 flag = CALL_EXPR_ARG (exp, 1);
12544 if (!validate_arg (flag, INTEGER_TYPE))
12545 return NULL_TREE;
12546 size = CALL_EXPR_ARG (exp, 2);
12547 if (!validate_arg (size, INTEGER_TYPE))
12548 return NULL_TREE;
12549 fmt = CALL_EXPR_ARG (exp, 3);
12550 if (!validate_arg (fmt, POINTER_TYPE))
12551 return NULL_TREE;
12553 if (! host_integerp (size, 1))
12554 return NULL_TREE;
12556 len = NULL_TREE;
12558 if (!init_target_chars ())
12559 return NULL_TREE;
12561 /* Check whether the format is a literal string constant. */
12562 fmt_str = c_getstr (fmt);
12563 if (fmt_str != NULL)
12565 /* If the format doesn't contain % args or %%, we know the size. */
12566 if (strchr (fmt_str, target_percent) == 0)
12568 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12569 len = build_int_cstu (size_type_node, strlen (fmt_str));
12571 /* If the format is "%s" and first ... argument is a string literal,
12572 we know the size too. */
12573 else if (fcode == BUILT_IN_SPRINTF_CHK
12574 && strcmp (fmt_str, target_percent_s) == 0)
12576 tree arg;
12578 if (nargs == 5)
12580 arg = CALL_EXPR_ARG (exp, 4);
12581 if (validate_arg (arg, POINTER_TYPE))
12583 len = c_strlen (arg, 1);
12584 if (! len || ! host_integerp (len, 1))
12585 len = NULL_TREE;
12591 if (! integer_all_onesp (size))
12593 if (! len || ! tree_int_cst_lt (len, size))
12594 return NULL_TREE;
12597 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12598 or if format doesn't contain % chars or is "%s". */
12599 if (! integer_zerop (flag))
12601 if (fmt_str == NULL)
12602 return NULL_TREE;
12603 if (strchr (fmt_str, target_percent) != NULL
12604 && strcmp (fmt_str, target_percent_s))
12605 return NULL_TREE;
12608 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12609 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12610 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12611 if (!fn)
12612 return NULL_TREE;
12614 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
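/* Illustrative __sprintf_chk rewrites performed above when the flag is 0
   and either the size is unlimited or the output length is known to fit:

     __sprintf_chk (d, 0, -1, "%d", x)  ->  sprintf (d, "%d", x)
     __sprintf_chk (d, 0, 16, "hello")  ->  sprintf (d, "hello")

   via rewrite_call_expr, which drops the flag and size arguments.  */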
12617 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
12618 a normal call should be emitted rather than expanding the function
12619 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12620 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12621 passed as second argument. */
12623 tree
12624 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12625 enum built_in_function fcode)
12627 tree dest, size, len, fn, fmt, flag;
12628 const char *fmt_str;
12630 /* Verify the required arguments in the original call. */
12631 if (call_expr_nargs (exp) < 5)
12632 return NULL_TREE;
12633 dest = CALL_EXPR_ARG (exp, 0);
12634 if (!validate_arg (dest, POINTER_TYPE))
12635 return NULL_TREE;
12636 len = CALL_EXPR_ARG (exp, 1);
12637 if (!validate_arg (len, INTEGER_TYPE))
12638 return NULL_TREE;
12639 flag = CALL_EXPR_ARG (exp, 2);
12640 if (!validate_arg (flag, INTEGER_TYPE))
12641 return NULL_TREE;
12642 size = CALL_EXPR_ARG (exp, 3);
12643 if (!validate_arg (size, INTEGER_TYPE))
12644 return NULL_TREE;
12645 fmt = CALL_EXPR_ARG (exp, 4);
12646 if (!validate_arg (fmt, POINTER_TYPE))
12647 return NULL_TREE;
12649 if (! host_integerp (size, 1))
12650 return NULL_TREE;
12652 if (! integer_all_onesp (size))
12654 if (! host_integerp (len, 1))
12656 /* If LEN is not constant, try MAXLEN too.
12657 For MAXLEN only allow optimizing into non-_ocs function
12658 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12659 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12660 return NULL_TREE;
12662 else
12663 maxlen = len;
12665 if (tree_int_cst_lt (size, maxlen))
12666 return NULL_TREE;
12669 if (!init_target_chars ())
12670 return NULL_TREE;
12672 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12673 or if format doesn't contain % chars or is "%s". */
12674 if (! integer_zerop (flag))
12676 fmt_str = c_getstr (fmt);
12677 if (fmt_str == NULL)
12678 return NULL_TREE;
12679 if (strchr (fmt_str, target_percent) != NULL
12680 && strcmp (fmt_str, target_percent_s))
12681 return NULL_TREE;
12684 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12685 available. */
12686 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12687 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12688 if (!fn)
12689 return NULL_TREE;
12691 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12694 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12695 FMT and ARG are the arguments to the call; we don't fold cases with
12696 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12698 Return NULL_TREE if no simplification was possible, otherwise return the
12699 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12700 code of the function to be simplified. */
12702 static tree
12703 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12704 enum built_in_function fcode)
12706 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12707 const char *fmt_str = NULL;
12709 /* If the return value is used, don't do the transformation. */
12710 if (! ignore)
12711 return NULL_TREE;
12713 /* Verify the required arguments in the original call. */
12714 if (!validate_arg (fmt, POINTER_TYPE))
12715 return NULL_TREE;
12717 /* Check whether the format is a literal string constant. */
12718 fmt_str = c_getstr (fmt);
12719 if (fmt_str == NULL)
12720 return NULL_TREE;
12722 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12724 /* If we're using an unlocked function, assume the other
12725 unlocked functions exist explicitly. */
12726 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12727 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12729 else
12731 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12732 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12735 if (!init_target_chars ())
12736 return NULL_TREE;
12738 if (strcmp (fmt_str, target_percent_s) == 0
12739 || strchr (fmt_str, target_percent) == NULL)
12741 const char *str;
12743 if (strcmp (fmt_str, target_percent_s) == 0)
12745 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12746 return NULL_TREE;
12748 if (!arg || !validate_arg (arg, POINTER_TYPE))
12749 return NULL_TREE;
12751 str = c_getstr (arg);
12752 if (str == NULL)
12753 return NULL_TREE;
12755 else
12757 /* The format specifier doesn't contain any '%' characters. */
12758 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12759 && arg)
12760 return NULL_TREE;
12761 str = fmt_str;
12764 /* If the string was "", printf does nothing. */
12765 if (str[0] == '\0')
12766 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12768 /* If the string has length of 1, call putchar. */
12769 if (str[1] == '\0')
12771 /* Given printf ("c") (where c is any one character),
12772 convert "c"[0] to an int and pass that to the replacement
12773 function. */
12774 newarg = build_int_cst (NULL_TREE, str[0]);
12775 if (fn_putchar)
12776 call = build_call_expr (fn_putchar, 1, newarg);
12778 else
12780 /* If the string was "string\n", call puts("string"). */
12781 size_t len = strlen (str);
12782 if ((unsigned char)str[len - 1] == target_newline)
12784 /* Create a NUL-terminated string that's one char shorter
12785 than the original, stripping off the trailing '\n'. */
12786 char *newstr = XALLOCAVEC (char, len);
12787 memcpy (newstr, str, len - 1);
12788 newstr[len - 1] = 0;
12790 newarg = build_string_literal (len, newstr);
12791 if (fn_puts)
12792 call = build_call_expr (fn_puts, 1, newarg);
12794 else
12795 /* We'd like to arrange to call fputs(string,stdout) here,
12796 but we need stdout and don't have a way to get it yet. */
12797 return NULL_TREE;
12801 /* The other optimizations can be done only on the non-va_list variants. */
12802 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12803 return NULL_TREE;
12805 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12806 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12808 if (!arg || !validate_arg (arg, POINTER_TYPE))
12809 return NULL_TREE;
12810 if (fn_puts)
12811 call = build_call_expr (fn_puts, 1, arg);
12814 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12815 else if (strcmp (fmt_str, target_percent_c) == 0)
12817 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12818 return NULL_TREE;
12819 if (fn_putchar)
12820 call = build_call_expr (fn_putchar, 1, arg);
12823 if (!call)
12824 return NULL_TREE;
12826 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
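/* Sketch of the printf rewrites above (return value ignored), assuming the
   putchar/puts builtin decls are available:

     printf ("")         ->  (call removed)
     printf ("x")        ->  putchar ('x')
     printf ("hello\n")  ->  puts ("hello")
     printf ("%s\n", s)  ->  puts (s)
     printf ("%c", c)    ->  putchar (c)  */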
12829 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12830 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12831 more than 3 arguments, and ARG may be null in the 2-argument case.
12833 Return NULL_TREE if no simplification was possible, otherwise return the
12834 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12835 code of the function to be simplified. */
12837 static tree
12838 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12839 enum built_in_function fcode)
12841 tree fn_fputc, fn_fputs, call = NULL_TREE;
12842 const char *fmt_str = NULL;
12844 /* If the return value is used, don't do the transformation. */
12845 if (! ignore)
12846 return NULL_TREE;
12848 /* Verify the required arguments in the original call. */
12849 if (!validate_arg (fp, POINTER_TYPE))
12850 return NULL_TREE;
12851 if (!validate_arg (fmt, POINTER_TYPE))
12852 return NULL_TREE;
12854 /* Check whether the format is a literal string constant. */
12855 fmt_str = c_getstr (fmt);
12856 if (fmt_str == NULL)
12857 return NULL_TREE;
12859 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12861 /* If we're using an unlocked function, assume the other
12862 unlocked functions exist explicitly. */
12863 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12864 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12866 else
12868 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12869 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12872 if (!init_target_chars ())
12873 return NULL_TREE;
12875 /* If the format doesn't contain % args or %%, use strcpy. */
12876 if (strchr (fmt_str, target_percent) == NULL)
12878 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12879 && arg)
12880 return NULL_TREE;
12882 /* If the format specifier was "", fprintf does nothing. */
12883 if (fmt_str[0] == '\0')
12885 /* If FP has side-effects, just wait until gimplification is
12886 done. */
12887 if (TREE_SIDE_EFFECTS (fp))
12888 return NULL_TREE;
12890 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12893 /* When "string" doesn't contain %, replace all cases of
12894 fprintf (fp, string) with fputs (string, fp). The fputs
12895 builtin will take care of special cases like length == 1. */
12896 if (fn_fputs)
12897 call = build_call_expr (fn_fputs, 2, fmt, fp);
12900 /* The other optimizations can be done only on the non-va_list variants. */
12901 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12902 return NULL_TREE;
12904 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12905 else if (strcmp (fmt_str, target_percent_s) == 0)
12907 if (!arg || !validate_arg (arg, POINTER_TYPE))
12908 return NULL_TREE;
12909 if (fn_fputs)
12910 call = build_call_expr (fn_fputs, 2, arg, fp);
12913 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12914 else if (strcmp (fmt_str, target_percent_c) == 0)
12916 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12917 return NULL_TREE;
12918 if (fn_fputc)
12919 call = build_call_expr (fn_fputc, 2, arg, fp);
12922 if (!call)
12923 return NULL_TREE;
12924 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
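/* Corresponding fprintf rewrites (return value ignored), illustratively:

     fprintf (f, "")       ->  (call removed, unless F has side effects)
     fprintf (f, "abc")    ->  fputs ("abc", f)
     fprintf (f, "%s", s)  ->  fputs (s, f)
     fprintf (f, "%c", c)  ->  fputc (c, f)  */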
12927 /* Initialize format string characters in the target charset. */
12929 static bool
12930 init_target_chars (void)
12932 static bool init;
12933 if (!init)
12935 target_newline = lang_hooks.to_target_charset ('\n');
12936 target_percent = lang_hooks.to_target_charset ('%');
12937 target_c = lang_hooks.to_target_charset ('c');
12938 target_s = lang_hooks.to_target_charset ('s');
12939 if (target_newline == 0 || target_percent == 0 || target_c == 0
12940 || target_s == 0)
12941 return false;
12943 target_percent_c[0] = target_percent;
12944 target_percent_c[1] = target_c;
12945 target_percent_c[2] = '\0';
12947 target_percent_s[0] = target_percent;
12948 target_percent_s[1] = target_s;
12949 target_percent_s[2] = '\0';
12951 target_percent_s_newline[0] = target_percent;
12952 target_percent_s_newline[1] = target_s;
12953 target_percent_s_newline[2] = target_newline;
12954 target_percent_s_newline[3] = '\0';
12956 init = true;
12958 return true;
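
/* Illustrative sketch (not literal code from this file): the characters
   cached above let the format-string folders compare against the *target*
   encoding of '%', 's', 'c' and '\n', so the transformations keep working
   when host and target character sets differ (e.g. an ASCII host targeting
   EBCDIC).  The helper below is hypothetical; it mirrors the
   strcmp (fmt_str, target_percent_s) checks used elsewhere in this file.  */
static bool
format_is_percent_s_example (const char *fmt_str)
{
  return fmt_str[0] == target_percent
	 && fmt_str[1] == target_s
	 && fmt_str[2] == '\0';
}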
12961 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12962 and no overflow/underflow occurred. INEXACT is true if M was not
12963 exactly calculated. TYPE is the tree type for the result. The
12964 caller must clear the MPFR flags and then calculate M before
12965 calling this function, so that any flag set on entry reflects
12966 that calculation. Return NULL_TREE if any checks fail. */
12968 static tree
12969 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12971 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12972 overflow/underflow occurred. If -frounding-math, proceed iff the
12973 result of calling FUNC was exact. */
12974 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12975 && (!flag_rounding_math || !inexact))
12977 REAL_VALUE_TYPE rr;
12979 real_from_mpfr (&rr, m, type, GMP_RNDN);
12980 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12981 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12982 but the mpfr_t is not, then we underflowed in the
12983 conversion. */
12984 if (real_isfinite (&rr)
12985 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12987 REAL_VALUE_TYPE rmode;
12989 real_convert (&rmode, TYPE_MODE (type), &rr);
12990 /* Proceed iff the specified mode can hold the value. */
12991 if (real_identical (&rmode, &rr))
12992 return build_real (type, rmode);
12995 return NULL_TREE;
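
/* Illustrative, self-contained sketch (not part of GCC) of the exactness
   protocol do_mpfr_ckconv relies on: clear the MPFR flags, perform one
   computation, then inspect the flags and the ternary "inexact" result.
   Compile with -lmpfr -lgmp.  */
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m;
  int inexact;

  mpfr_init2 (m, 53);             /* precision of IEEE double (p = 53) */
  mpfr_set_d (m, 0.5, GMP_RNDN);
  mpfr_clear_flags ();
  inexact = mpfr_sin (m, m, GMP_RNDN);

  /* Proceed only for a normal number with no overflow/underflow, exactly
     as the checks above do.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ())
    printf ("sin(0.5) = %.17g (%s)\n", mpfr_get_d (m, GMP_RNDN),
	    inexact ? "rounded" : "exact");

  mpfr_clear (m);
  return 0;
}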
12998 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12999 FUNC on it and return the resulting value as a tree with type TYPE.
13000 If MIN and/or MAX are not NULL, then the supplied ARG must be
13001 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13002 acceptable values, otherwise they are not. The mpfr precision is
13003 set to the precision of TYPE. We assume that function FUNC returns
13004 zero if the result could be calculated exactly within the requested
13005 precision. */
13007 static tree
13008 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13009 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13010 bool inclusive)
13012 tree result = NULL_TREE;
13014 STRIP_NOPS (arg);
13016 /* To proceed, MPFR must exactly represent the target floating point
13017 format, which only happens when the target base equals two. */
13018 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13019 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13021 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13023 if (real_isfinite (ra)
13024 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13025 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13027 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13028 const int prec = fmt->p;
13029 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13030 int inexact;
13031 mpfr_t m;
13033 mpfr_init2 (m, prec);
13034 mpfr_from_real (m, ra, GMP_RNDN);
13035 mpfr_clear_flags ();
13036 inexact = func (m, m, rnd);
13037 result = do_mpfr_ckconv (m, type, inexact);
13038 mpfr_clear (m);
13042 return result;
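
/* Illustrative sketch (not literal code from this file): how a per-builtin
   case is expected to invoke do_mpfr_arg1.  mpfr_asin is only defined for
   arguments in [-1, 1], so the bounds are supplied with INCLUSIVE true;
   the wrapper name is hypothetical.  */
static tree
fold_asin_example (tree arg, tree type)
{
  return do_mpfr_arg1 (arg, type, mpfr_asin, &dconstm1, &dconst1,
		       /*inclusive=*/true);
}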
13045 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13046 FUNC on it and return the resulting value as a tree with type TYPE.
13047 The mpfr precision is set to the precision of TYPE. We assume that
13048 function FUNC returns zero if the result could be calculated
13049 exactly within the requested precision. */
13051 static tree
13052 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13053 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13055 tree result = NULL_TREE;
13057 STRIP_NOPS (arg1);
13058 STRIP_NOPS (arg2);
13060 /* To proceed, MPFR must exactly represent the target floating point
13061 format, which only happens when the target base equals two. */
13062 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13063 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13064 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13066 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13067 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13069 if (real_isfinite (ra1) && real_isfinite (ra2))
13071 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13072 const int prec = fmt->p;
13073 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13074 int inexact;
13075 mpfr_t m1, m2;
13077 mpfr_inits2 (prec, m1, m2, NULL);
13078 mpfr_from_real (m1, ra1, GMP_RNDN);
13079 mpfr_from_real (m2, ra2, GMP_RNDN);
13080 mpfr_clear_flags ();
13081 inexact = func (m1, m1, m2, rnd);
13082 result = do_mpfr_ckconv (m1, type, inexact);
13083 mpfr_clears (m1, m2, NULL);
13087 return result;
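
/* Illustrative sketch (not literal code from this file): a two-argument
   use, folding atan2 of two constant operands; the wrapper name is
   hypothetical.  */
static tree
fold_atan2_example (tree arg0, tree arg1, tree type)
{
  return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
}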
13090 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13091 FUNC on it and return the resulting value as a tree with type TYPE.
13092 The mpfr precision is set to the precision of TYPE. We assume that
13093 function FUNC returns zero if the result could be calculated
13094 exactly within the requested precision. */
13096 static tree
13097 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13098 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13100 tree result = NULL_TREE;
13102 STRIP_NOPS (arg1);
13103 STRIP_NOPS (arg2);
13104 STRIP_NOPS (arg3);
13106 /* To proceed, MPFR must exactly represent the target floating point
13107 format, which only happens when the target base equals two. */
13108 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13109 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13110 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13111 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13113 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13114 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13115 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13117 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13119 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13120 const int prec = fmt->p;
13121 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13122 int inexact;
13123 mpfr_t m1, m2, m3;
13125 mpfr_inits2 (prec, m1, m2, m3, NULL);
13126 mpfr_from_real (m1, ra1, GMP_RNDN);
13127 mpfr_from_real (m2, ra2, GMP_RNDN);
13128 mpfr_from_real (m3, ra3, GMP_RNDN);
13129 mpfr_clear_flags ();
13130 inexact = func (m1, m1, m2, m3, rnd);
13131 result = do_mpfr_ckconv (m1, type, inexact);
13132 mpfr_clears (m1, m2, m3, NULL);
13136 return result;
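
/* Illustrative sketch (not literal code from this file): a three-argument
   use, folding fma of three constant operands; the wrapper name is
   hypothetical.  */
static tree
fold_fma_example (tree arg0, tree arg1, tree arg2, tree type)
{
  return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
}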
13139 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13140 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13141 If ARG_SINP and ARG_COSP are NULL then the result is returned
13142 as a complex value.
13143 The type is taken from the type of ARG and is used for setting the
13144 precision of the calculation and results. */
13146 static tree
13147 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13149 tree const type = TREE_TYPE (arg);
13150 tree result = NULL_TREE;
13152 STRIP_NOPS (arg);
13154 /* To proceed, MPFR must exactly represent the target floating point
13155 format, which only happens when the target base equals two. */
13156 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13157 && TREE_CODE (arg) == REAL_CST
13158 && !TREE_OVERFLOW (arg))
13160 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13162 if (real_isfinite (ra))
13164 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13165 const int prec = fmt->p;
13166 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13167 tree result_s, result_c;
13168 int inexact;
13169 mpfr_t m, ms, mc;
13171 mpfr_inits2 (prec, m, ms, mc, NULL);
13172 mpfr_from_real (m, ra, GMP_RNDN);
13173 mpfr_clear_flags ();
13174 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13175 result_s = do_mpfr_ckconv (ms, type, inexact);
13176 result_c = do_mpfr_ckconv (mc, type, inexact);
13177 mpfr_clears (m, ms, mc, NULL);
13178 if (result_s && result_c)
13180 /* If we are to return the result as a complex value, do so. */
13181 if (!arg_sinp && !arg_cosp)
13182 return build_complex (build_complex_type (type),
13183 result_c, result_s);
13185 /* Dereference the sin/cos pointer arguments. */
13186 arg_sinp = build_fold_indirect_ref (arg_sinp);
13187 arg_cosp = build_fold_indirect_ref (arg_cosp);
13189 /* Proceed iff valid pointer types were passed in. */
13189 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13190 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13192 /* Set the values. */
13193 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13194 result_s);
13195 TREE_SIDE_EFFECTS (result_s) = 1;
13196 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13197 result_c);
13198 TREE_SIDE_EFFECTS (result_c) = 1;
13199 /* Combine the assignments into a compound expr. */
13200 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13201 result_s, result_c));
13206 return result;
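
/* Illustrative, self-contained sketch (not part of builtins.c): the
   source-level effect of do_mpfr_sincos.  With a constant argument and
   optimization enabled, the call may be replaced by two stores of folded
   constants; when no output pointers are supplied (the cexpi-style use),
   the result is returned as a complex constant instead.  */
#define _GNU_SOURCE
#include <math.h>

void
sincos_fold_example (double *sp, double *cp)
{
  sincos (1.0, sp, cp);   /* may fold to: *sp = <sin (1.0)>, *cp = <cos (1.0)> */
}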
13209 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13210 two-argument mpfr order N Bessel function FUNC on them and return
13211 the resulting value as a tree with type TYPE. The mpfr precision
13212 is set to the precision of TYPE. We assume that function FUNC
13213 returns zero if the result could be calculated exactly within the
13214 requested precision. */
13215 static tree
13216 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13217 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13218 const REAL_VALUE_TYPE *min, bool inclusive)
13220 tree result = NULL_TREE;
13222 STRIP_NOPS (arg1);
13223 STRIP_NOPS (arg2);
13225 /* To proceed, MPFR must exactly represent the target floating point
13226 format, which only happens when the target base equals two. */
13227 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13228 && host_integerp (arg1, 0)
13229 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13231 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13232 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13234 if (n == (long)n
13235 && real_isfinite (ra)
13236 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13238 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13239 const int prec = fmt->p;
13240 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13241 int inexact;
13242 mpfr_t m;
13244 mpfr_init2 (m, prec);
13245 mpfr_from_real (m, ra, GMP_RNDN);
13246 mpfr_clear_flags ();
13247 inexact = func (m, n, m, rnd);
13248 result = do_mpfr_ckconv (m, type, inexact);
13249 mpfr_clear (m);
13253 return result;
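
/* Illustrative sketch (not literal code from this file): folding jn (n, x)
   with both arguments constant via the order-N Bessel entry point; the
   wrapper name is hypothetical.  */
static tree
fold_jn_example (tree arg0, tree arg1, tree type)
{
  return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, false);
}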
13256 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13257 the pointer *(ARG_QUO) and return the result. The type is taken
13258 from the type of ARG0 and is used for setting the precision of the
13259 calculation and results. */
13261 static tree
13262 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13264 tree const type = TREE_TYPE (arg0);
13265 tree result = NULL_TREE;
13267 STRIP_NOPS (arg0);
13268 STRIP_NOPS (arg1);
13270 /* To proceed, MPFR must exactly represent the target floating point
13271 format, which only happens when the target base equals two. */
13272 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13273 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13274 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13276 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13277 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13279 if (real_isfinite (ra0) && real_isfinite (ra1))
13281 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13282 const int prec = fmt->p;
13283 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13284 tree result_rem;
13285 long integer_quo;
13286 mpfr_t m0, m1;
13288 mpfr_inits2 (prec, m0, m1, NULL);
13289 mpfr_from_real (m0, ra0, GMP_RNDN);
13290 mpfr_from_real (m1, ra1, GMP_RNDN);
13291 mpfr_clear_flags ();
13292 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13293 /* Remquo is independent of the rounding mode, so pass
13294 inexact=0 to do_mpfr_ckconv(). */
13295 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13296 mpfr_clears (m0, m1, NULL);
13297 if (result_rem)
13299 /* MPFR calculates quo in the host's long so it may
13300 return more bits in quo than the target int can hold
13301 if sizeof(host long) > sizeof(target int). This can
13302 happen even for native compilers in LP64 mode. In
13303 these cases, reduce the quo value modulo 2^(INT_TYPE_SIZE - 1)
13304 so that it fits in the target int with one bit left
13305 for the sign. */
13306 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13307 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13309 /* Dereference the quo pointer argument. */
13310 arg_quo = build_fold_indirect_ref (arg_quo);
13311 /* Proceed iff a valid pointer type was passed in. */
13312 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13314 /* Set the value. */
13315 tree result_quo = fold_build2 (MODIFY_EXPR,
13316 TREE_TYPE (arg_quo), arg_quo,
13317 build_int_cst (NULL, integer_quo));
13318 TREE_SIDE_EFFECTS (result_quo) = 1;
13319 /* Combine the quo assignment with the rem. */
13320 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13321 result_quo, result_rem));
13326 return result;
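
/* Illustrative, self-contained sketch (not part of builtins.c): the
   source-level effect of do_mpfr_remquo with constant operands.  The call
   may fold into an assignment of the rounded quotient followed by the
   constant remainder.  */
#include <math.h>

double
remquo_fold_example (int *quo)
{
  return remquo (10.0, 3.0, quo);   /* may fold to: (*quo = 3, 1.0) */
}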
13329 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13330 resulting value as a tree with type TYPE. The mpfr precision is
13331 set to the precision of TYPE. We assume that this mpfr function
13332 returns zero if the result could be calculated exactly within the
13333 requested precision. In addition, the integer pointer represented
13334 by ARG_SG will be dereferenced and set to the appropriate signgam
13335 (-1,1) value. */
13337 static tree
13338 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13340 tree result = NULL_TREE;
13342 STRIP_NOPS (arg);
13344 /* To proceed, MPFR must exactly represent the target floating point
13345 format, which only happens when the target base equals two. Also
13346 verify ARG is a constant and that ARG_SG is an int pointer. */
13347 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13348 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13349 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13350 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13352 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13354 /* In addition to NaN and Inf, the argument cannot be zero or a
13355 negative integer. */
13356 if (real_isfinite (ra)
13357 && ra->cl != rvc_zero
13358 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13360 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13361 const int prec = fmt->p;
13362 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13363 int inexact, sg;
13364 mpfr_t m;
13365 tree result_lg;
13367 mpfr_init2 (m, prec);
13368 mpfr_from_real (m, ra, GMP_RNDN);
13369 mpfr_clear_flags ();
13370 inexact = mpfr_lgamma (m, &sg, m, rnd);
13371 result_lg = do_mpfr_ckconv (m, type, inexact);
13372 mpfr_clear (m);
13373 if (result_lg)
13375 tree result_sg;
13377 /* Dereference the arg_sg pointer argument. */
13378 arg_sg = build_fold_indirect_ref (arg_sg);
13379 /* Assign the signgam value into *arg_sg. */
13380 result_sg = fold_build2 (MODIFY_EXPR,
13381 TREE_TYPE (arg_sg), arg_sg,
13382 build_int_cst (NULL, sg));
13383 TREE_SIDE_EFFECTS (result_sg) = 1;
13384 /* Combine the signgam assignment with the lgamma result. */
13385 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13386 result_sg, result_lg));
13391 return result;
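
/* Illustrative, self-contained sketch (not part of builtins.c): the
   source-level effect of do_mpfr_lgamma_r with a constant argument.  The
   call may fold into an assignment of the sign of gamma (here +1) followed
   by the constant log-gamma value.  */
#define _GNU_SOURCE
#include <math.h>

double
lgamma_r_fold_example (int *signp)
{
  return lgamma_r (0.5, signp);   /* may fold to: (*signp = 1, <log (sqrt (pi))>) */
}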
13394 /* FIXME tuples.
13395 The functions below provide an alternate interface for folding
13396 builtin function calls presented as GIMPLE_CALL statements rather
13397 than as CALL_EXPRs. The folded result is still expressed as a
13398 tree. There is too much code duplication in the handling of
13399 varargs functions, and a more intrusive re-factoring would permit
13400 better sharing of code between the tree and statement-based
13401 versions of these functions. */
13403 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13404 along with N new arguments specified as the "..." parameters. SKIP
13405 is the number of arguments in STMT to be omitted. This function is used
13406 to do varargs-to-varargs transformations. */
13408 static tree
13409 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13411 int oldnargs = gimple_call_num_args (stmt);
13412 int nargs = oldnargs - skip + n;
13413 tree fntype = TREE_TYPE (fndecl);
13414 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13415 tree *buffer;
13416 int i, j;
13417 va_list ap;
13419 buffer = XALLOCAVEC (tree, nargs);
13420 va_start (ap, n);
13421 for (i = 0; i < n; i++)
13422 buffer[i] = va_arg (ap, tree);
13423 va_end (ap);
13424 for (j = skip; j < oldnargs; j++, i++)
13425 buffer[i] = gimple_call_arg (stmt, j);
13427 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
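
/* Illustrative sketch (not literal code from this file): how the folders
   below use gimple_rewrite_call_expr.  For a statement representing
   __sprintf_chk (dest, flag, size, fmt, a, b), SKIP = 4 drops the first
   four arguments and N = 2 places DEST and FMT in front of the remaining
   "..." arguments, yielding sprintf (dest, fmt, a, b); the wrapper name is
   hypothetical.  */
static tree
rewrite_sprintf_chk_example (gimple stmt, tree sprintf_decl, tree dest, tree fmt)
{
  return gimple_rewrite_call_expr (stmt, 4, sprintf_decl, 2, dest, fmt);
}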
13430 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13431 a normal call should be emitted rather than expanding the function
13432 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13434 static tree
13435 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13437 tree dest, size, len, fn, fmt, flag;
13438 const char *fmt_str;
13439 int nargs = gimple_call_num_args (stmt);
13441 /* Verify the required arguments in the original call. */
13442 if (nargs < 4)
13443 return NULL_TREE;
13444 dest = gimple_call_arg (stmt, 0);
13445 if (!validate_arg (dest, POINTER_TYPE))
13446 return NULL_TREE;
13447 flag = gimple_call_arg (stmt, 1);
13448 if (!validate_arg (flag, INTEGER_TYPE))
13449 return NULL_TREE;
13450 size = gimple_call_arg (stmt, 2);
13451 if (!validate_arg (size, INTEGER_TYPE))
13452 return NULL_TREE;
13453 fmt = gimple_call_arg (stmt, 3);
13454 if (!validate_arg (fmt, POINTER_TYPE))
13455 return NULL_TREE;
13457 if (! host_integerp (size, 1))
13458 return NULL_TREE;
13460 len = NULL_TREE;
13462 if (!init_target_chars ())
13463 return NULL_TREE;
13465 /* Check whether the format is a literal string constant. */
13466 fmt_str = c_getstr (fmt);
13467 if (fmt_str != NULL)
13469 /* If the format doesn't contain % args or %%, we know the size. */
13470 if (strchr (fmt_str, target_percent) == 0)
13472 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13473 len = build_int_cstu (size_type_node, strlen (fmt_str));
13475 /* If the format is "%s" and the first "..." argument is a string literal,
13476 we know the size too. */
13477 else if (fcode == BUILT_IN_SPRINTF_CHK
13478 && strcmp (fmt_str, target_percent_s) == 0)
13480 tree arg;
13482 if (nargs == 5)
13484 arg = gimple_call_arg (stmt, 4);
13485 if (validate_arg (arg, POINTER_TYPE))
13487 len = c_strlen (arg, 1);
13488 if (! len || ! host_integerp (len, 1))
13489 len = NULL_TREE;
13495 if (! integer_all_onesp (size))
13497 if (! len || ! tree_int_cst_lt (len, size))
13498 return NULL_TREE;
13501 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13502 or if format doesn't contain % chars or is "%s". */
13503 if (! integer_zerop (flag))
13505 if (fmt_str == NULL)
13506 return NULL_TREE;
13507 if (strchr (fmt_str, target_percent) != NULL
13508 && strcmp (fmt_str, target_percent_s))
13509 return NULL_TREE;
13512 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13513 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13514 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13515 if (!fn)
13516 return NULL_TREE;
13518 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
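
/* Illustrative, self-contained sketch (not part of builtins.c): the
   source-level effect of gimple_fold_builtin_sprintf_chk.  With glibc
   fortification the sprintf call below is emitted as __sprintf_chk with the
   known object size of BUF; because the format contains no '%' and its
   length provably fits, the checked call may be rewritten back into plain
   sprintf (and possibly further simplified by the other folders).  */
#include <stdio.h>

void
sprintf_chk_fold_example (void)
{
  char buf[16];
  sprintf (buf, "hello");   /* __sprintf_chk (buf, flag, 16, "hello") -> sprintf (buf, "hello") */
}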
13521 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
13522 a normal call should be emitted rather than expanding the function
13523 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13524 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
13525 passed as the second argument. */
13527 tree
13528 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13529 enum built_in_function fcode)
13531 tree dest, size, len, fn, fmt, flag;
13532 const char *fmt_str;
13534 /* Verify the required arguments in the original call. */
13535 if (gimple_call_num_args (stmt) < 5)
13536 return NULL_TREE;
13537 dest = gimple_call_arg (stmt, 0);
13538 if (!validate_arg (dest, POINTER_TYPE))
13539 return NULL_TREE;
13540 len = gimple_call_arg (stmt, 1);
13541 if (!validate_arg (len, INTEGER_TYPE))
13542 return NULL_TREE;
13543 flag = gimple_call_arg (stmt, 2);
13544 if (!validate_arg (flag, INTEGER_TYPE))
13545 return NULL_TREE;
13546 size = gimple_call_arg (stmt, 3);
13547 if (!validate_arg (size, INTEGER_TYPE))
13548 return NULL_TREE;
13549 fmt = gimple_call_arg (stmt, 4);
13550 if (!validate_arg (fmt, POINTER_TYPE))
13551 return NULL_TREE;
13553 if (! host_integerp (size, 1))
13554 return NULL_TREE;
13556 if (! integer_all_onesp (size))
13558 if (! host_integerp (len, 1))
13560 /* If LEN is not constant, try MAXLEN too.
13561 For MAXLEN only allow optimizing into the non-_chk function
13562 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
13563 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13564 return NULL_TREE;
13566 else
13567 maxlen = len;
13569 if (tree_int_cst_lt (size, maxlen))
13570 return NULL_TREE;
13573 if (!init_target_chars ())
13574 return NULL_TREE;
13576 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13577 or if format doesn't contain % chars or is "%s". */
13578 if (! integer_zerop (flag))
13580 fmt_str = c_getstr (fmt);
13581 if (fmt_str == NULL)
13582 return NULL_TREE;
13583 if (strchr (fmt_str, target_percent) != NULL
13584 && strcmp (fmt_str, target_percent_s))
13585 return NULL_TREE;
13588 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13589 available. */
13590 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13591 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13592 if (!fn)
13593 return NULL_TREE;
13595 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
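
/* Illustrative, self-contained sketch (not part of builtins.c): the
   source-level effect of gimple_fold_builtin_snprintf_chk.  When the length
   argument is a compile-time constant that does not exceed the known object
   size (so the runtime check can never trigger), the checked call may be
   rewritten into plain snprintf.  */
#include <stdio.h>

void
snprintf_chk_fold_example (const char *s)
{
  char buf[32];
  snprintf (buf, sizeof buf, "%s", s);
  /* __snprintf_chk (buf, 32, flag, 32, "%s", s) -> snprintf (buf, 32, "%s", s) */
}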
13598 /* Builtins with folding operations that operate on "..." arguments
13599 need special handling; we need to store the arguments in a convenient
13600 data structure before attempting any folding. Fortunately there are
13601 only a few builtins that fall into this category. FNDECL is the
13602 function, STMT is the GIMPLE_CALL statement for the call, and IGNORE is true if the
13603 result of the function call is ignored. */
13605 static tree
13606 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13608 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13609 tree ret = NULL_TREE;
13611 switch (fcode)
13613 case BUILT_IN_SPRINTF_CHK:
13614 case BUILT_IN_VSPRINTF_CHK:
13615 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13616 break;
13618 case BUILT_IN_SNPRINTF_CHK:
13619 case BUILT_IN_VSNPRINTF_CHK:
13620 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13622 default:
13623 break;
13625 if (ret)
13627 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13628 TREE_NO_WARNING (ret) = 1;
13629 return ret;
13631 return NULL_TREE;
13634 /* A wrapper function for builtin folding that prevents warnings for
13635 "statement without effect" and the like, caused by removing the
13636 call node earlier than the warning is generated. */
13638 tree
13639 fold_call_stmt (gimple stmt, bool ignore)
13641 tree ret = NULL_TREE;
13642 tree fndecl = gimple_call_fndecl (stmt);
13643 if (fndecl
13644 && TREE_CODE (fndecl) == FUNCTION_DECL
13645 && DECL_BUILT_IN (fndecl)
13646 && !gimple_call_va_arg_pack_p (stmt))
13648 int nargs = gimple_call_num_args (stmt);
13650 /* FIXME: Don't use a list in this interface. */
13651 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13653 tree arglist = NULL_TREE;
13654 int i;
13655 for (i = nargs - 1; i >= 0; i--)
13656 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13657 return targetm.fold_builtin (fndecl, arglist, ignore);
13659 else
13661 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13663 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13664 int i;
13665 for (i = 0; i < nargs; i++)
13666 args[i] = gimple_call_arg (stmt, i);
13667 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13669 if (!ret)
13670 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13671 if (ret)
13673 /* Propagate location information from original call to
13674 expansion of builtin. Otherwise things like
13675 maybe_emit_chk_warning, that operate on the expansion
13676 of a builtin, will use the wrong location information. */
13677 if (gimple_has_location (stmt))
13679 tree realret = ret;
13680 if (TREE_CODE (ret) == NOP_EXPR)
13681 realret = TREE_OPERAND (ret, 0);
13682 if (CAN_HAVE_LOCATION_P (realret)
13683 && !EXPR_HAS_LOCATION (realret))
13684 SET_EXPR_LOCATION (realret, gimple_location (stmt));
13685 return realret;
13687 return ret;
13691 return NULL_TREE;