/* Expand builtin functions.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "diagnostic.h"

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees, making sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance when the runtime is
   not required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];

static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_bcopy (tree, int);
static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, rtx);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static rtx expand_builtin_fputs (tree, rtx, bool);
static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
static tree stabilize_va_list (tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (tree);
static tree fold_builtin_inf (tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (tree, tree);
static tree fold_builtin_cbrt (tree, tree);
static tree fold_builtin_pow (tree, tree, tree, tree);
static tree fold_builtin_powi (tree, tree, tree, tree);
static tree fold_builtin_cos (tree, tree, tree);
static tree fold_builtin_cosh (tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (tree, tree);
static tree fold_builtin_floor (tree, tree);
static tree fold_builtin_ceil (tree, tree);
static tree fold_builtin_round (tree, tree);
static tree fold_builtin_int_roundingfn (tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (tree, tree, tree);
static tree fold_builtin_memchr (tree, tree, tree, tree);
static tree fold_builtin_memcmp (tree, tree, tree);
static tree fold_builtin_strcmp (tree, tree);
static tree fold_builtin_strncmp (tree, tree, tree);
static tree fold_builtin_signbit (tree, tree);
static tree fold_builtin_copysign (tree, tree, tree, tree);
static tree fold_builtin_isascii (tree);
static tree fold_builtin_toascii (tree);
static tree fold_builtin_isdigit (tree);
static tree fold_builtin_fabs (tree, tree);
static tree fold_builtin_abs (tree, tree);
static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_n (tree, tree *, int, bool);
static tree fold_builtin_0 (tree, bool);
static tree fold_builtin_1 (tree, tree, bool);
static tree fold_builtin_2 (tree, tree, tree, bool);
static tree fold_builtin_3 (tree, tree, tree, tree, bool);
static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (tree, tree, bool);

static tree fold_builtin_strpbrk (tree, tree, tree);
static tree fold_builtin_strstr (tree, tree, tree);
static tree fold_builtin_strrchr (tree, tree, tree);
static tree fold_builtin_strcat (tree, tree);
static tree fold_builtin_strncat (tree, tree, tree);
static tree fold_builtin_strspn (tree, tree);
static tree fold_builtin_strcspn (tree, tree);
static tree fold_builtin_sprintf (tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
                                  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}

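/* For example, calls spelled "__builtin_memcpy" or "__sync_fetch_and_add_4"
   are considered for inline expansion even at -O0, whereas a plain "memcpy"
   call is normally left as a library call when not optimizing.  */
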
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what, ALIGN is the initial
   guessed alignment e.g. from type alignment.  */

int
get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
{
  unsigned int inner;

  inner = max_align;
  if (handled_component_p (exp))
    {
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      enum machine_mode mode;
      int unsignedp, volatilep;

      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                 &mode, &unsignedp, &volatilep, true);
      if (bitpos)
        inner = MIN (inner, (unsigned) (bitpos & -bitpos));
      while (offset)
        {
          tree next_offset;

          if (TREE_CODE (offset) == PLUS_EXPR)
            {
              next_offset = TREE_OPERAND (offset, 0);
              offset = TREE_OPERAND (offset, 1);
            }
          else
            next_offset = NULL;
          if (host_integerp (offset, 1))
            {
              /* Any overflow in calculating offset_bits won't change
                 the alignment.  */
              unsigned offset_bits
                = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

              if (offset_bits)
                inner = MIN (inner, (offset_bits & -offset_bits));
            }
          else if (TREE_CODE (offset) == MULT_EXPR
                   && host_integerp (TREE_OPERAND (offset, 1), 1))
            {
              /* Any overflow in calculating offset_factor won't change
                 the alignment.  */
              unsigned offset_factor
                = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
                   * BITS_PER_UNIT);

              if (offset_factor)
                inner = MIN (inner, (offset_factor & -offset_factor));
            }
          else
            {
              inner = MIN (inner, BITS_PER_UNIT);
              break;
            }
          offset = next_offset;
        }
    }
  if (DECL_P (exp))
    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
  else if (CONSTANT_CLASS_P (exp))
    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
           || TREE_CODE (exp) == INDIRECT_REF)
    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
  else
    align = MIN (align, inner);
  return MIN (align, max_align);
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* We rely on TER to compute accurate alignment information.  */
  if (!(optimize && flag_tree_ter))
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
        {
        CASE_CONVERT:
          exp = TREE_OPERAND (exp, 0);
          if (! POINTER_TYPE_P (TREE_TYPE (exp)))
            return align;

          inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
          align = MIN (inner, max_align);
          break;

        case POINTER_PLUS_EXPR:
          /* If sum of pointer + int, restrict our maximum alignment to that
             imposed by the integer.  If not, we can't do any better than
             ALIGN.  */
          if (! host_integerp (TREE_OPERAND (exp, 1), 1))
            return align;

          while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
                  & (max_align / BITS_PER_UNIT - 1))
                 != 0)
            max_align >>= 1;

          exp = TREE_OPERAND (exp, 0);
          break;

        case ADDR_EXPR:
          /* See what we are pointing at and look at its alignment.  */
          return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);

        default:
          return align;
        }
    }
}

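/* A sketch of how the walk above narrows the result, assuming a target with
   32-bit ints:

     int buf[4];
     ... (char *) buf + 1 ...

   The ADDR_EXPR for BUF would give at least 32-bit alignment, but the
   enclosing POINTER_PLUS_EXPR carries a constant offset of 1, so MAX_ALIGN
   is halved until the offset is a multiple of it and the final answer is
   BITS_PER_UNIT.  */
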
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, the front-end should
     have warned already.  We call strlen at runtime.

     ??? Perhaps we should turn this into an assert and force
     front-ends to define offsets within boundaries.  */
  if (offset < 0 || offset > max)
    {
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}

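/* Worked example: for a string constant "foo\0bar", MAX is 7 and

     - a known offset of 2 yields ssize_int (1), found by running the host
       strlen at that starting index;
     - a variable offset yields NULL_TREE, because the embedded zero byte
       makes "length minus offset" the wrong answer;
     - for "foobar", which has no embedded zero, a variable offset gives
       size_diffop (size_int (6), offset_node).  */
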
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}

/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
        ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}

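/* Worked example, assuming 8-bit units and that host and target are both
   little-endian with 32-bit SImode: c_readstr ("ab", SImode) picks up 'a',
   'b' and then the terminating zero (once CH becomes zero no further bytes
   are read), producing the CONST_INT 0x6261.  With big-endian byte order
   the same call yields 0x61620000.  */
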
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (!host_integerp (cst, 1)
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = tree_low_cst (cst, 1);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

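/* With CHAR_TYPE_SIZE == HOST_BITS_PER_CHAR == 8 the value is simply
   truncated to the low 8 bits and stored through P.  The failure path
   matters for a hypothetical target with, say, CHAR_TYPE_SIZE == 16: a
   constant such as 0x141 survives the target-char mask but not the
   host-char mask, so VAL != HOSTVAL and 1 is returned.  */
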
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_ADDRESSABLE (exp) == 0
      && (TREE_CODE (exp) == PARM_DECL
          || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Tell optimize_save_area_alloca that extra work is going to
     need to go on during alloca.  */
  cfun->calls_setjmp = 1;

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

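/* Layout of the __builtin_setjmp buffer as written above, in Pmode words:
   word 0 holds the frame value (targetm.builtin_setjmp_frame_value), word 1
   the address of RECEIVER_LABEL, and the words from offset
   2 * GET_MODE_SIZE (Pmode) on hold the stack save area in
   STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  expand_builtin_longjmp and
   expand_builtin_update_setjmp_buf below read and rewrite the same slots
   at the same offsets.  */
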
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_clobber (static_chain_rtx);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
         apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.
         This sets the actual hard register used for the frame pointer
         to the location of the function's incoming static chain info.
         The non-local goto handler will then adjust it to contain the
         proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;

#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = build_int_cst (NULL_TREE, 3);

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
             (op0,
              insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
          || (GET_MODE (op0) != Pmode))
        {
          op0 = convert_memory_address (Pmode, op0);
          op0 = force_reg (Pmode, op0);
        }
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

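/* For instance, source-level calls such as

     __builtin_prefetch (p);          // rw = 0, locality = 3 (the defaults)
     __builtin_prefetch (p, 1, 1);    // prefetch for write, low temporal locality

   reach this point with the missing arguments already defaulted as above;
   on a target without a "prefetch" insn only the side effects of evaluating
   P are kept.  */
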
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;
  HOST_WIDE_INT off;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  off = 0;
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && host_integerp (TREE_OPERAND (exp, 1), 0)
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  else if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      if (off)
        mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);

      /* Allow the string and memory builtins to overflow from one
         field into another, see http://gcc.gnu.org/PR23561.
         Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
         memory accessed by the string or memory builtin will fit
         within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
        {
          tree mem_expr = MEM_EXPR (mem);
          HOST_WIDE_INT offset = -1, length = -1;
          tree inner = exp;

          while (TREE_CODE (inner) == ARRAY_REF
                 || CONVERT_EXPR_P (inner)
                 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
                 || TREE_CODE (inner) == SAVE_EXPR)
            inner = TREE_OPERAND (inner, 0);

          gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

          if (MEM_OFFSET (mem)
              && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
            offset = INTVAL (MEM_OFFSET (mem));

          if (offset >= 0 && len && host_integerp (len, 0))
            length = tree_low_cst (len, 0);

          while (TREE_CODE (inner) == COMPONENT_REF)
            {
              tree field = TREE_OPERAND (inner, 1);
              gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
              gcc_assert (field == TREE_OPERAND (mem_expr, 1));

              /* Bitfields are generally not byte-addressable.  */
              gcc_assert (!DECL_BIT_FIELD (field)
                          || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                               % BITS_PER_UNIT) == 0
                              && host_integerp (DECL_SIZE (field), 0)
                              && (TREE_INT_CST_LOW (DECL_SIZE (field))
                                  % BITS_PER_UNIT) == 0));

              /* If we can prove that the memory starting at XEXP (mem, 0) and
                 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
                 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
                 fields without DECL_SIZE_UNIT like flexible array members.  */
              if (length >= 0
                  && DECL_SIZE_UNIT (field)
                  && host_integerp (DECL_SIZE_UNIT (field), 0))
                {
                  HOST_WIDE_INT size
                    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
                  if (offset <= size
                      && length <= size
                      && offset + length <= size)
                    break;
                }

              if (offset >= 0
                  && host_integerp (DECL_FIELD_OFFSET (field), 0))
                offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                          + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                            / BITS_PER_UNIT;
              else
                {
                  offset = -1;
                  length = -1;
                }

              mem_expr = TREE_OPERAND (mem_expr, 0);
              inner = TREE_OPERAND (inner, 0);
            }

          if (mem_expr == NULL)
            offset = -1;
          if (mem_expr != MEM_EXPR (mem))
            {
              set_mem_expr (mem, mem_expr);
              set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
            }
        }
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = reg_raw_mode[regno];

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            apply_args_reg_offset[regno] = size;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
            apply_args_reg_offset[regno] = 0;
          }
    }
  return size;
}

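/* As a sketch, the block whose size is computed above is laid out as: the
   incoming arg pointer (one Pmode word), then the structure value address
   (another Pmode word, only if it is not passed as an "invisible" first
   argument), then every FUNCTION_ARG_REGNO_P register in turn, each padded
   up to its mode alignment, with apply_args_reg_offset[] recording where
   each register landed.  expand_builtin_apply_args_1 and
   expand_builtin_apply below walk the block with the same size/alignment
   arithmetic.  */
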
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_VALUE_REGNO_P (regno))
          {
            mode = reg_raw_mode[regno];

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed it to us, not
     as we might have pretended it was passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (GET_CODE (argsize) == CONST_INT)
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            gcc_assert (!valreg); /* HAVE_untyped_call required.  */

            valreg = gen_rtx_REG (mode, regno);
          }

      emit_call_insn (GEN_CALL_VALUE (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}

1630 /* Perform an untyped return. */
1632 static void
1633 expand_builtin_return (rtx result)
1635 int size, align, regno;
1636 enum machine_mode mode;
1637 rtx reg;
1638 rtx call_fusage = 0;
1640 result = convert_memory_address (Pmode, result);
1642 apply_result_size ();
1643 result = gen_rtx_MEM (BLKmode, result);
1645 #ifdef HAVE_untyped_return
1646 if (HAVE_untyped_return)
1648 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1649 emit_barrier ();
1650 return;
1652 #endif
1654 /* Restore the return value and note that each value is used. */
1655 size = 0;
1656 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1657 if ((mode = apply_result_mode[regno]) != VOIDmode)
1659 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1660 if (size % align != 0)
1661 size = CEIL (size, align) * align;
1662 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1663 emit_move_insn (reg, adjust_address (result, mode, size));
1665 push_to_sequence (call_fusage);
1666 emit_use (reg);
1667 call_fusage = get_insns ();
1668 end_sequence ();
1669 size += GET_MODE_SIZE (mode);
1672 /* Put the USE insns before the return. */
1673 emit_insn (call_fusage);
1675 /* Return whatever values were restored by jumping directly to the end
1676 of the function. */
1677 expand_naked_return ();
1680 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1682 static enum type_class
1683 type_to_class (tree type)
1685 switch (TREE_CODE (type))
1687 case VOID_TYPE: return void_type_class;
1688 case INTEGER_TYPE: return integer_type_class;
1689 case ENUMERAL_TYPE: return enumeral_type_class;
1690 case BOOLEAN_TYPE: return boolean_type_class;
1691 case POINTER_TYPE: return pointer_type_class;
1692 case REFERENCE_TYPE: return reference_type_class;
1693 case OFFSET_TYPE: return offset_type_class;
1694 case REAL_TYPE: return real_type_class;
1695 case COMPLEX_TYPE: return complex_type_class;
1696 case FUNCTION_TYPE: return function_type_class;
1697 case METHOD_TYPE: return method_type_class;
1698 case RECORD_TYPE: return record_type_class;
1699 case UNION_TYPE:
1700 case QUAL_UNION_TYPE: return union_type_class;
1701 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1702 ? string_type_class : array_type_class);
1703 case LANG_TYPE: return lang_type_class;
1704 default: return no_type_class;
1708 /* Expand a call EXP to __builtin_classify_type. */
1710 static rtx
1711 expand_builtin_classify_type (tree exp)
1713 if (call_expr_nargs (exp))
1714 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1715 return GEN_INT (no_type_class);
1718 /* This helper macro, meant to be used in mathfn_built_in below,
1719 determines which among a set of three builtin math functions is
1720 appropriate for a given type mode. The `F' and `L' cases are
1721 automatically generated from the `double' case. */
1722 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1723 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1724 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1725 fcodel = BUILT_IN_MATHFN##L ; break;
1726 /* Similar to above, but appends _R after any F/L suffix. */
1727 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1728 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1729 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1730 fcodel = BUILT_IN_MATHFN##L_R ; break;
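/* For illustration, CASE_MATHFN (BUILT_IN_SIN) expands (modulo whitespace)
   into the following switch fragment, mapping any of the three variants
   onto the full double/float/long double triple:

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;  */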
1732 /* Return the mathematical function equivalent to FN but operating directly
1733 on TYPE, if available. If IMPLICIT is true find the function in
1734 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1735 can't do the conversion, return zero. */
1737 static tree
1738 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1740 tree const *const fn_arr
1741 = implicit ? implicit_built_in_decls : built_in_decls;
1742 enum built_in_function fcode, fcodef, fcodel;
1744 switch (fn)
1746 CASE_MATHFN (BUILT_IN_ACOS)
1747 CASE_MATHFN (BUILT_IN_ACOSH)
1748 CASE_MATHFN (BUILT_IN_ASIN)
1749 CASE_MATHFN (BUILT_IN_ASINH)
1750 CASE_MATHFN (BUILT_IN_ATAN)
1751 CASE_MATHFN (BUILT_IN_ATAN2)
1752 CASE_MATHFN (BUILT_IN_ATANH)
1753 CASE_MATHFN (BUILT_IN_CBRT)
1754 CASE_MATHFN (BUILT_IN_CEIL)
1755 CASE_MATHFN (BUILT_IN_CEXPI)
1756 CASE_MATHFN (BUILT_IN_COPYSIGN)
1757 CASE_MATHFN (BUILT_IN_COS)
1758 CASE_MATHFN (BUILT_IN_COSH)
1759 CASE_MATHFN (BUILT_IN_DREM)
1760 CASE_MATHFN (BUILT_IN_ERF)
1761 CASE_MATHFN (BUILT_IN_ERFC)
1762 CASE_MATHFN (BUILT_IN_EXP)
1763 CASE_MATHFN (BUILT_IN_EXP10)
1764 CASE_MATHFN (BUILT_IN_EXP2)
1765 CASE_MATHFN (BUILT_IN_EXPM1)
1766 CASE_MATHFN (BUILT_IN_FABS)
1767 CASE_MATHFN (BUILT_IN_FDIM)
1768 CASE_MATHFN (BUILT_IN_FLOOR)
1769 CASE_MATHFN (BUILT_IN_FMA)
1770 CASE_MATHFN (BUILT_IN_FMAX)
1771 CASE_MATHFN (BUILT_IN_FMIN)
1772 CASE_MATHFN (BUILT_IN_FMOD)
1773 CASE_MATHFN (BUILT_IN_FREXP)
1774 CASE_MATHFN (BUILT_IN_GAMMA)
1775 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1776 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1777 CASE_MATHFN (BUILT_IN_HYPOT)
1778 CASE_MATHFN (BUILT_IN_ILOGB)
1779 CASE_MATHFN (BUILT_IN_INF)
1780 CASE_MATHFN (BUILT_IN_ISINF)
1781 CASE_MATHFN (BUILT_IN_J0)
1782 CASE_MATHFN (BUILT_IN_J1)
1783 CASE_MATHFN (BUILT_IN_JN)
1784 CASE_MATHFN (BUILT_IN_LCEIL)
1785 CASE_MATHFN (BUILT_IN_LDEXP)
1786 CASE_MATHFN (BUILT_IN_LFLOOR)
1787 CASE_MATHFN (BUILT_IN_LGAMMA)
1788 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1789 CASE_MATHFN (BUILT_IN_LLCEIL)
1790 CASE_MATHFN (BUILT_IN_LLFLOOR)
1791 CASE_MATHFN (BUILT_IN_LLRINT)
1792 CASE_MATHFN (BUILT_IN_LLROUND)
1793 CASE_MATHFN (BUILT_IN_LOG)
1794 CASE_MATHFN (BUILT_IN_LOG10)
1795 CASE_MATHFN (BUILT_IN_LOG1P)
1796 CASE_MATHFN (BUILT_IN_LOG2)
1797 CASE_MATHFN (BUILT_IN_LOGB)
1798 CASE_MATHFN (BUILT_IN_LRINT)
1799 CASE_MATHFN (BUILT_IN_LROUND)
1800 CASE_MATHFN (BUILT_IN_MODF)
1801 CASE_MATHFN (BUILT_IN_NAN)
1802 CASE_MATHFN (BUILT_IN_NANS)
1803 CASE_MATHFN (BUILT_IN_NEARBYINT)
1804 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1805 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1806 CASE_MATHFN (BUILT_IN_POW)
1807 CASE_MATHFN (BUILT_IN_POWI)
1808 CASE_MATHFN (BUILT_IN_POW10)
1809 CASE_MATHFN (BUILT_IN_REMAINDER)
1810 CASE_MATHFN (BUILT_IN_REMQUO)
1811 CASE_MATHFN (BUILT_IN_RINT)
1812 CASE_MATHFN (BUILT_IN_ROUND)
1813 CASE_MATHFN (BUILT_IN_SCALB)
1814 CASE_MATHFN (BUILT_IN_SCALBLN)
1815 CASE_MATHFN (BUILT_IN_SCALBN)
1816 CASE_MATHFN (BUILT_IN_SIGNBIT)
1817 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1818 CASE_MATHFN (BUILT_IN_SIN)
1819 CASE_MATHFN (BUILT_IN_SINCOS)
1820 CASE_MATHFN (BUILT_IN_SINH)
1821 CASE_MATHFN (BUILT_IN_SQRT)
1822 CASE_MATHFN (BUILT_IN_TAN)
1823 CASE_MATHFN (BUILT_IN_TANH)
1824 CASE_MATHFN (BUILT_IN_TGAMMA)
1825 CASE_MATHFN (BUILT_IN_TRUNC)
1826 CASE_MATHFN (BUILT_IN_Y0)
1827 CASE_MATHFN (BUILT_IN_Y1)
1828 CASE_MATHFN (BUILT_IN_YN)
1830 default:
1831 return NULL_TREE;
1834 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1835 return fn_arr[fcode];
1836 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1837 return fn_arr[fcodef];
1838 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1839 return fn_arr[fcodel];
1840 else
1841 return NULL_TREE;
1844 /* Like mathfn_built_in_1(), but always use the implicit array. */
1846 tree
1847 mathfn_built_in (tree type, enum built_in_function fn)
1849 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
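/* A brief usage sketch (illustrative only): to fold or expand a call on a
   float argument, callers ask for the single-precision counterpart with

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   which yields the implicit declaration of sinf, or NULL_TREE when the
   runtime is not assumed to provide it.  */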
1852 /* If errno must be maintained, expand the RTL to check if the result,
1853 TARGET, of a built-in function call, EXP, is NaN, and if so set
1854 errno to EDOM. */
1856 static void
1857 expand_errno_check (tree exp, rtx target)
1859 rtx lab = gen_label_rtx ();
1861 /* Test the result; if it is NaN, set errno=EDOM because
1862 the argument was not in the domain. */
1863 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1864 0, lab);
1866 #ifdef TARGET_EDOM
1867 /* If this built-in doesn't throw an exception, set errno directly. */
1868 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1870 #ifdef GEN_ERRNO_RTX
1871 rtx errno_rtx = GEN_ERRNO_RTX;
1872 #else
1873 rtx errno_rtx
1874 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1875 #endif
1876 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1877 emit_label (lab);
1878 return;
1880 #endif
1882 /* Make sure the library call isn't expanded as a tail call. */
1883 CALL_EXPR_TAILCALL (exp) = 0;
1885 /* We can't set errno=EDOM directly; let the library call do it.
1886 Pop the arguments right away in case the call gets deleted. */
1887 NO_DEFER_POP;
1888 expand_call (exp, target, 0);
1889 OK_DEFER_POP;
1890 emit_label (lab);
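/* Purely illustrative sketch of the check emitted above; the helper below
   is hypothetical and not part of the compiler.  It relies on a NaN
   comparing unequal to itself; a stand-alone build would also assume
   <errno.h> for errno and EDOM.  */

static double
sketch_check_edom_after_call (double result)
{
  if (result != result)
    /* The result is NaN, so the argument was outside the domain.  */
    errno = EDOM;
  return result;
}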
1893 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1894 Return NULL_RTX if a normal call should be emitted rather than expanding
1895 the function in-line. EXP is the expression that is a call to the builtin
1896 function; if convenient, the result should be placed in TARGET.
1897 SUBTARGET may be used as the target for computing one of EXP's operands. */
1899 static rtx
1900 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1902 optab builtin_optab;
1903 rtx op0, insns, before_call;
1904 tree fndecl = get_callee_fndecl (exp);
1905 enum machine_mode mode;
1906 bool errno_set = false;
1907 tree arg;
1909 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1910 return NULL_RTX;
1912 arg = CALL_EXPR_ARG (exp, 0);
1914 switch (DECL_FUNCTION_CODE (fndecl))
1916 CASE_FLT_FN (BUILT_IN_SQRT):
1917 errno_set = ! tree_expr_nonnegative_p (arg);
1918 builtin_optab = sqrt_optab;
1919 break;
1920 CASE_FLT_FN (BUILT_IN_EXP):
1921 errno_set = true; builtin_optab = exp_optab; break;
1922 CASE_FLT_FN (BUILT_IN_EXP10):
1923 CASE_FLT_FN (BUILT_IN_POW10):
1924 errno_set = true; builtin_optab = exp10_optab; break;
1925 CASE_FLT_FN (BUILT_IN_EXP2):
1926 errno_set = true; builtin_optab = exp2_optab; break;
1927 CASE_FLT_FN (BUILT_IN_EXPM1):
1928 errno_set = true; builtin_optab = expm1_optab; break;
1929 CASE_FLT_FN (BUILT_IN_LOGB):
1930 errno_set = true; builtin_optab = logb_optab; break;
1931 CASE_FLT_FN (BUILT_IN_LOG):
1932 errno_set = true; builtin_optab = log_optab; break;
1933 CASE_FLT_FN (BUILT_IN_LOG10):
1934 errno_set = true; builtin_optab = log10_optab; break;
1935 CASE_FLT_FN (BUILT_IN_LOG2):
1936 errno_set = true; builtin_optab = log2_optab; break;
1937 CASE_FLT_FN (BUILT_IN_LOG1P):
1938 errno_set = true; builtin_optab = log1p_optab; break;
1939 CASE_FLT_FN (BUILT_IN_ASIN):
1940 builtin_optab = asin_optab; break;
1941 CASE_FLT_FN (BUILT_IN_ACOS):
1942 builtin_optab = acos_optab; break;
1943 CASE_FLT_FN (BUILT_IN_TAN):
1944 builtin_optab = tan_optab; break;
1945 CASE_FLT_FN (BUILT_IN_ATAN):
1946 builtin_optab = atan_optab; break;
1947 CASE_FLT_FN (BUILT_IN_FLOOR):
1948 builtin_optab = floor_optab; break;
1949 CASE_FLT_FN (BUILT_IN_CEIL):
1950 builtin_optab = ceil_optab; break;
1951 CASE_FLT_FN (BUILT_IN_TRUNC):
1952 builtin_optab = btrunc_optab; break;
1953 CASE_FLT_FN (BUILT_IN_ROUND):
1954 builtin_optab = round_optab; break;
1955 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1956 builtin_optab = nearbyint_optab;
1957 if (flag_trapping_math)
1958 break;
1959 /* Else fallthrough and expand as rint. */
1960 CASE_FLT_FN (BUILT_IN_RINT):
1961 builtin_optab = rint_optab; break;
1962 default:
1963 gcc_unreachable ();
1966 /* Make a suitable register to place result in. */
1967 mode = TYPE_MODE (TREE_TYPE (exp));
1969 if (! flag_errno_math || ! HONOR_NANS (mode))
1970 errno_set = false;
1972 /* Before working hard, check whether the instruction is available. */
1973 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1975 target = gen_reg_rtx (mode);
1977 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1978 need to expand the argument again. This way, we will not perform
1979 side-effects more than once. */
1980 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1982 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1984 start_sequence ();
1986 /* Compute into TARGET.
1987 Set TARGET to wherever the result comes back. */
1988 target = expand_unop (mode, builtin_optab, op0, target, 0);
1990 if (target != 0)
1992 if (errno_set)
1993 expand_errno_check (exp, target);
1995 /* Output the entire sequence. */
1996 insns = get_insns ();
1997 end_sequence ();
1998 emit_insn (insns);
1999 return target;
2002 /* If we were unable to expand via the builtin, stop the sequence
2003 (without outputting the insns) and call the library function
2004 with the stabilized argument list. */
2005 end_sequence ();
2008 before_call = get_last_insn ();
2010 return expand_call (exp, target, target == const0_rtx);
2013 /* Expand a call to the builtin binary math functions (pow and atan2).
2014 Return NULL_RTX if a normal call should be emitted rather than expanding the
2015 function in-line. EXP is the expression that is a call to the builtin
2016 function; if convenient, the result should be placed in TARGET.
2017 SUBTARGET may be used as the target for computing one of EXP's
2018 operands. */
2020 static rtx
2021 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2023 optab builtin_optab;
2024 rtx op0, op1, insns;
2025 int op1_type = REAL_TYPE;
2026 tree fndecl = get_callee_fndecl (exp);
2027 tree arg0, arg1;
2028 enum machine_mode mode;
2029 bool errno_set = true;
2031 switch (DECL_FUNCTION_CODE (fndecl))
2033 CASE_FLT_FN (BUILT_IN_SCALBN):
2034 CASE_FLT_FN (BUILT_IN_SCALBLN):
2035 CASE_FLT_FN (BUILT_IN_LDEXP):
2036 op1_type = INTEGER_TYPE;
2037 default:
2038 break;
2041 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2042 return NULL_RTX;
2044 arg0 = CALL_EXPR_ARG (exp, 0);
2045 arg1 = CALL_EXPR_ARG (exp, 1);
2047 switch (DECL_FUNCTION_CODE (fndecl))
2049 CASE_FLT_FN (BUILT_IN_POW):
2050 builtin_optab = pow_optab; break;
2051 CASE_FLT_FN (BUILT_IN_ATAN2):
2052 builtin_optab = atan2_optab; break;
2053 CASE_FLT_FN (BUILT_IN_SCALB):
2054 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2055 return 0;
2056 builtin_optab = scalb_optab; break;
2057 CASE_FLT_FN (BUILT_IN_SCALBN):
2058 CASE_FLT_FN (BUILT_IN_SCALBLN):
2059 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2060 return 0;
2061 /* Fall through... */
2062 CASE_FLT_FN (BUILT_IN_LDEXP):
2063 builtin_optab = ldexp_optab; break;
2064 CASE_FLT_FN (BUILT_IN_FMOD):
2065 builtin_optab = fmod_optab; break;
2066 CASE_FLT_FN (BUILT_IN_REMAINDER):
2067 CASE_FLT_FN (BUILT_IN_DREM):
2068 builtin_optab = remainder_optab; break;
2069 default:
2070 gcc_unreachable ();
2073 /* Make a suitable register to place result in. */
2074 mode = TYPE_MODE (TREE_TYPE (exp));
2076 /* Before working hard, check whether the instruction is available. */
2077 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2078 return NULL_RTX;
2080 target = gen_reg_rtx (mode);
2082 if (! flag_errno_math || ! HONOR_NANS (mode))
2083 errno_set = false;
2085 /* Always stabilize the argument list. */
2086 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2087 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2089 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2090 op1 = expand_normal (arg1);
2092 start_sequence ();
2094 /* Compute into TARGET.
2095 Set TARGET to wherever the result comes back. */
2096 target = expand_binop (mode, builtin_optab, op0, op1,
2097 target, 0, OPTAB_DIRECT);
2099 /* If we were unable to expand via the builtin, stop the sequence
2100 (without outputting the insns) and call the library function
2101 with the stabilized argument list. */
2102 if (target == 0)
2104 end_sequence ();
2105 return expand_call (exp, target, target == const0_rtx);
2108 if (errno_set)
2109 expand_errno_check (exp, target);
2111 /* Output the entire sequence. */
2112 insns = get_insns ();
2113 end_sequence ();
2114 emit_insn (insns);
2116 return target;
2119 /* Expand a call to the builtin sin and cos math functions.
2120 Return NULL_RTX if a normal call should be emitted rather than expanding the
2121 function in-line. EXP is the expression that is a call to the builtin
2122 function; if convenient, the result should be placed in TARGET.
2123 SUBTARGET may be used as the target for computing one of EXP's
2124 operands. */
2126 static rtx
2127 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2129 optab builtin_optab;
2130 rtx op0, insns;
2131 tree fndecl = get_callee_fndecl (exp);
2132 enum machine_mode mode;
2133 tree arg;
2135 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2136 return NULL_RTX;
2138 arg = CALL_EXPR_ARG (exp, 0);
2140 switch (DECL_FUNCTION_CODE (fndecl))
2142 CASE_FLT_FN (BUILT_IN_SIN):
2143 CASE_FLT_FN (BUILT_IN_COS):
2144 builtin_optab = sincos_optab; break;
2145 default:
2146 gcc_unreachable ();
2149 /* Make a suitable register to place result in. */
2150 mode = TYPE_MODE (TREE_TYPE (exp));
2152 /* Check if the sincos insn is available, otherwise fall back
2153 to the sin or cos insn. */
2154 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2155 switch (DECL_FUNCTION_CODE (fndecl))
2157 CASE_FLT_FN (BUILT_IN_SIN):
2158 builtin_optab = sin_optab; break;
2159 CASE_FLT_FN (BUILT_IN_COS):
2160 builtin_optab = cos_optab; break;
2161 default:
2162 gcc_unreachable ();
2165 /* Before working hard, check whether the instruction is available. */
2166 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2168 target = gen_reg_rtx (mode);
2170 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2171 need to expand the argument again. This way, we will not perform
2172 side-effects more than once. */
2173 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2175 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2177 start_sequence ();
2179 /* Compute into TARGET.
2180 Set TARGET to wherever the result comes back. */
2181 if (builtin_optab == sincos_optab)
2183 int result;
2185 switch (DECL_FUNCTION_CODE (fndecl))
2187 CASE_FLT_FN (BUILT_IN_SIN):
2188 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2189 break;
2190 CASE_FLT_FN (BUILT_IN_COS):
2191 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2192 break;
2193 default:
2194 gcc_unreachable ();
2196 gcc_assert (result);
2198 else
2200 target = expand_unop (mode, builtin_optab, op0, target, 0);
2203 if (target != 0)
2205 /* Output the entire sequence. */
2206 insns = get_insns ();
2207 end_sequence ();
2208 emit_insn (insns);
2209 return target;
2212 /* If we were unable to expand via the builtin, stop the sequence
2213 (without outputting the insns) and call the library function
2214 with the stabilized argument list. */
2215 end_sequence ();
2218 target = expand_call (exp, target, target == const0_rtx);
2220 return target;
2223 /* Expand a call to one of the builtin math functions that operate on
2224 a floating point argument and output an integer result (ilogb, isinf,
2225 isnan, etc.).
2226 Return 0 if a normal call should be emitted rather than expanding the
2227 function in-line. EXP is the expression that is a call to the builtin
2228 function; if convenient, the result should be placed in TARGET.
2229 SUBTARGET may be used as the target for computing one of EXP's operands. */
2231 static rtx
2232 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2234 optab builtin_optab = 0;
2235 enum insn_code icode = CODE_FOR_nothing;
2236 rtx op0;
2237 tree fndecl = get_callee_fndecl (exp);
2238 enum machine_mode mode;
2239 bool errno_set = false;
2240 tree arg;
2242 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2243 return NULL_RTX;
2245 arg = CALL_EXPR_ARG (exp, 0);
2247 switch (DECL_FUNCTION_CODE (fndecl))
2249 CASE_FLT_FN (BUILT_IN_ILOGB):
2250 errno_set = true; builtin_optab = ilogb_optab; break;
2251 CASE_FLT_FN (BUILT_IN_ISINF):
2252 builtin_optab = isinf_optab; break;
2253 case BUILT_IN_ISNORMAL:
2254 case BUILT_IN_ISFINITE:
2255 CASE_FLT_FN (BUILT_IN_FINITE):
2256 /* These builtins have no optabs (yet). */
2257 break;
2258 default:
2259 gcc_unreachable ();
2262 /* There's no easy way to detect the case we need to set EDOM. */
2263 if (flag_errno_math && errno_set)
2264 return NULL_RTX;
2266 /* Optab mode depends on the mode of the input argument. */
2267 mode = TYPE_MODE (TREE_TYPE (arg));
2269 if (builtin_optab)
2270 icode = optab_handler (builtin_optab, mode)->insn_code;
2272 /* Before working hard, check whether the instruction is available. */
2273 if (icode != CODE_FOR_nothing)
2275 /* Make a suitable register to place result in. */
2276 if (!target
2277 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2278 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2280 gcc_assert (insn_data[icode].operand[0].predicate
2281 (target, GET_MODE (target)));
2283 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2284 need to expand the argument again. This way, we will not perform
2285 side-effects more than once. */
2286 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2288 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2290 if (mode != GET_MODE (op0))
2291 op0 = convert_to_mode (mode, op0, 0);
2293 /* Compute into TARGET.
2294 Set TARGET to wherever the result comes back. */
2295 emit_unop_insn (icode, target, op0, UNKNOWN);
2296 return target;
2299 /* If there is no optab, try generic code. */
2300 switch (DECL_FUNCTION_CODE (fndecl))
2302 tree result;
2304 CASE_FLT_FN (BUILT_IN_ISINF):
2306 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2307 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2308 tree const type = TREE_TYPE (arg);
2309 REAL_VALUE_TYPE r;
2310 char buf[128];
2312 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2313 real_from_string (&r, buf);
2314 result = build_call_expr (isgr_fn, 2,
2315 fold_build1 (ABS_EXPR, type, arg),
2316 build_real (type, r));
2317 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2319 CASE_FLT_FN (BUILT_IN_FINITE):
2320 case BUILT_IN_ISFINITE:
2322 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2323 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2324 tree const type = TREE_TYPE (arg);
2325 REAL_VALUE_TYPE r;
2326 char buf[128];
2328 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2329 real_from_string (&r, buf);
2330 result = build_call_expr (isle_fn, 2,
2331 fold_build1 (ABS_EXPR, type, arg),
2332 build_real (type, r));
2333 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2335 case BUILT_IN_ISNORMAL:
2337 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2338 islessequal(fabs(x),DBL_MAX). */
2339 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2340 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2341 tree const type = TREE_TYPE (arg);
2342 REAL_VALUE_TYPE rmax, rmin;
2343 char buf[128];
2345 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2346 real_from_string (&rmax, buf);
2347 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2348 real_from_string (&rmin, buf);
2349 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2350 result = build_call_expr (isle_fn, 2, arg,
2351 build_real (type, rmax));
2352 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2353 build_call_expr (isge_fn, 2, arg,
2354 build_real (type, rmin)));
2355 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2357 default:
2358 break;
2361 target = expand_call (exp, target, target == const0_rtx);
2363 return target;
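/* Hedged source-level sketches of the generic lowerings used above when no
   optab exists.  The helpers are hypothetical and assume <math.h> and
   <float.h> for a stand-alone build; DBL_MAX and DBL_MIN stand in for the
   per-mode values obtained via get_max_float and 0x1p(emin-1).  */

static int
sketch_isinf (double x)
{
  /* isinf(x) -> isgreater(fabs(x), DBL_MAX).  */
  return isgreater (fabs (x), DBL_MAX);
}

static int
sketch_isfinite (double x)
{
  /* isfinite(x) -> islessequal(fabs(x), DBL_MAX).  */
  return islessequal (fabs (x), DBL_MAX);
}

static int
sketch_isnormal (double x)
{
  /* isnormal(x) -> isgreaterequal(fabs(x), DBL_MIN)
                    & islessequal(fabs(x), DBL_MAX).  */
  return isgreaterequal (fabs (x), DBL_MIN) & islessequal (fabs (x), DBL_MAX);
}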
2366 /* Expand a call to the builtin sincos math function.
2367 Return NULL_RTX if a normal call should be emitted rather than expanding the
2368 function in-line. EXP is the expression that is a call to the builtin
2369 function. */
2371 static rtx
2372 expand_builtin_sincos (tree exp)
2374 rtx op0, op1, op2, target1, target2;
2375 enum machine_mode mode;
2376 tree arg, sinp, cosp;
2377 int result;
2379 if (!validate_arglist (exp, REAL_TYPE,
2380 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2381 return NULL_RTX;
2383 arg = CALL_EXPR_ARG (exp, 0);
2384 sinp = CALL_EXPR_ARG (exp, 1);
2385 cosp = CALL_EXPR_ARG (exp, 2);
2387 /* Make a suitable register to place result in. */
2388 mode = TYPE_MODE (TREE_TYPE (arg));
2390 /* Check if sincos insn is available, otherwise emit the call. */
2391 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2392 return NULL_RTX;
2394 target1 = gen_reg_rtx (mode);
2395 target2 = gen_reg_rtx (mode);
2397 op0 = expand_normal (arg);
2398 op1 = expand_normal (build_fold_indirect_ref (sinp));
2399 op2 = expand_normal (build_fold_indirect_ref (cosp));
2401 /* Compute into target1 and target2.
2402 Set TARGET to wherever the result comes back. */
2403 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2404 gcc_assert (result);
2406 /* Move target1 and target2 to the memory locations indicated
2407 by op1 and op2. */
2408 emit_move_insn (op1, target1);
2409 emit_move_insn (op2, target2);
2411 return const0_rtx;
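/* Usage sketch (illustrative, hypothetical helper): when the sincos optab
   exists, the call below is expanded into a single two-output operation
   writing through both pointers; otherwise the expander returns NULL_RTX
   and a normal library call to the GNU sincos extension is emitted.  */

static void
sketch_use_sincos (double x, double *s, double *c)
{
  __builtin_sincos (x, s, c);
}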
2414 /* Expand a call to the internal cexpi builtin to the sincos math function.
2415 EXP is the expression that is a call to the builtin function; if convenient,
2416 the result should be placed in TARGET. SUBTARGET may be used as the target
2417 for computing one of EXP's operands. */
2419 static rtx
2420 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2422 tree fndecl = get_callee_fndecl (exp);
2423 tree arg, type;
2424 enum machine_mode mode;
2425 rtx op0, op1, op2;
2427 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2428 return NULL_RTX;
2430 arg = CALL_EXPR_ARG (exp, 0);
2431 type = TREE_TYPE (arg);
2432 mode = TYPE_MODE (TREE_TYPE (arg));
2434 /* Try expanding via a sincos optab, fall back to emitting a libcall
2435 to sincos or cexp. We are sure to have sincos or cexp because cexpi
2436 is only generated from sincos, from cexp, or when either of them is available. */
2437 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2439 op1 = gen_reg_rtx (mode);
2440 op2 = gen_reg_rtx (mode);
2442 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2444 /* Compute into op1 and op2. */
2445 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2447 else if (TARGET_HAS_SINCOS)
2449 tree call, fn = NULL_TREE;
2450 tree top1, top2;
2451 rtx op1a, op2a;
2453 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2454 fn = built_in_decls[BUILT_IN_SINCOSF];
2455 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2456 fn = built_in_decls[BUILT_IN_SINCOS];
2457 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2458 fn = built_in_decls[BUILT_IN_SINCOSL];
2459 else
2460 gcc_unreachable ();
2462 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2463 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2464 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2465 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2466 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2467 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2469 /* Make sure not to fold the sincos call again. */
2470 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2471 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2472 call, 3, arg, top1, top2));
2474 else
2476 tree call, fn = NULL_TREE, narg;
2477 tree ctype = build_complex_type (type);
2479 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2480 fn = built_in_decls[BUILT_IN_CEXPF];
2481 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2482 fn = built_in_decls[BUILT_IN_CEXP];
2483 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2484 fn = built_in_decls[BUILT_IN_CEXPL];
2485 else
2486 gcc_unreachable ();
2488 /* If we don't have a decl for cexp create one. This is the
2489 friendliest fallback if the user calls __builtin_cexpi
2490 on a target lacking full C99 function support. */
2491 if (fn == NULL_TREE)
2493 tree fntype;
2494 const char *name = NULL;
2496 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2497 name = "cexpf";
2498 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2499 name = "cexp";
2500 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2501 name = "cexpl";
2503 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2504 fn = build_fn_decl (name, fntype);
2507 narg = fold_build2 (COMPLEX_EXPR, ctype,
2508 build_real (type, dconst0), arg);
2510 /* Make sure not to fold the cexp call again. */
2511 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2512 return expand_expr (build_call_nary (ctype, call, 1, narg),
2513 target, VOIDmode, EXPAND_NORMAL);
2516 /* Now build the proper return type. */
2517 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2518 make_tree (TREE_TYPE (arg), op2),
2519 make_tree (TREE_TYPE (arg), op1)),
2520 target, VOIDmode, EXPAND_NORMAL);
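/* Hedged sketch of the library fallbacks above; the helper is hypothetical
   and a stand-alone build would assume GNU C complex support.  cexpi(x) is
   cos(x) + i*sin(x), so it can be recovered either from sincos or from
   cexp of the purely imaginary argument 0 + x*i.  */

static double _Complex
sketch_cexpi (double x)
{
  double s, c;

  /* The TARGET_HAS_SINCOS / sincos-optab flavour.  */
  __builtin_sincos (x, &s, &c);

  /* Real part is the cosine, imaginary part the sine; the cexp fallback
     instead evaluates cexp (0.0 + x * 1.0i), which yields the same value.  */
  return c + s * 1.0i;
}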
2523 /* Expand a call to one of the builtin rounding functions gcc defines
2524 as an extension (lfloor and lceil). As these are gcc extensions we
2525 do not need to worry about setting errno to EDOM.
2526 If expanding via optab fails, lower expression to (int)(floor(x)).
2527 EXP is the expression that is a call to the builtin function;
2528 if convenient, the result should be placed in TARGET. */
2530 static rtx
2531 expand_builtin_int_roundingfn (tree exp, rtx target)
2533 convert_optab builtin_optab;
2534 rtx op0, insns, tmp;
2535 tree fndecl = get_callee_fndecl (exp);
2536 enum built_in_function fallback_fn;
2537 tree fallback_fndecl;
2538 enum machine_mode mode;
2539 tree arg;
2541 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2542 gcc_unreachable ();
2544 arg = CALL_EXPR_ARG (exp, 0);
2546 switch (DECL_FUNCTION_CODE (fndecl))
2548 CASE_FLT_FN (BUILT_IN_LCEIL):
2549 CASE_FLT_FN (BUILT_IN_LLCEIL):
2550 builtin_optab = lceil_optab;
2551 fallback_fn = BUILT_IN_CEIL;
2552 break;
2554 CASE_FLT_FN (BUILT_IN_LFLOOR):
2555 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2556 builtin_optab = lfloor_optab;
2557 fallback_fn = BUILT_IN_FLOOR;
2558 break;
2560 default:
2561 gcc_unreachable ();
2564 /* Make a suitable register to place result in. */
2565 mode = TYPE_MODE (TREE_TYPE (exp));
2567 target = gen_reg_rtx (mode);
2569 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2570 need to expand the argument again. This way, we will not perform
2571 side-effects more than once. */
2572 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2574 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2576 start_sequence ();
2578 /* Compute into TARGET. */
2579 if (expand_sfix_optab (target, op0, builtin_optab))
2581 /* Output the entire sequence. */
2582 insns = get_insns ();
2583 end_sequence ();
2584 emit_insn (insns);
2585 return target;
2588 /* If we were unable to expand via the builtin, stop the sequence
2589 (without outputting the insns). */
2590 end_sequence ();
2592 /* Fall back to floating point rounding optab. */
2593 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2595 /* For non-C99 targets we may end up without a fallback fndecl here
2596 if the user called __builtin_lfloor directly. In this case emit
2597 a call to the floor/ceil variants nevertheless. This should result
2598 in the best user experience for targets lacking full C99 support. */
2599 if (fallback_fndecl == NULL_TREE)
2601 tree fntype;
2602 const char *name = NULL;
2604 switch (DECL_FUNCTION_CODE (fndecl))
2606 case BUILT_IN_LCEIL:
2607 case BUILT_IN_LLCEIL:
2608 name = "ceil";
2609 break;
2610 case BUILT_IN_LCEILF:
2611 case BUILT_IN_LLCEILF:
2612 name = "ceilf";
2613 break;
2614 case BUILT_IN_LCEILL:
2615 case BUILT_IN_LLCEILL:
2616 name = "ceill";
2617 break;
2618 case BUILT_IN_LFLOOR:
2619 case BUILT_IN_LLFLOOR:
2620 name = "floor";
2621 break;
2622 case BUILT_IN_LFLOORF:
2623 case BUILT_IN_LLFLOORF:
2624 name = "floorf";
2625 break;
2626 case BUILT_IN_LFLOORL:
2627 case BUILT_IN_LLFLOORL:
2628 name = "floorl";
2629 break;
2630 default:
2631 gcc_unreachable ();
2634 fntype = build_function_type_list (TREE_TYPE (arg),
2635 TREE_TYPE (arg), NULL_TREE);
2636 fallback_fndecl = build_fn_decl (name, fntype);
2639 exp = build_call_expr (fallback_fndecl, 1, arg);
2641 tmp = expand_normal (exp);
2643 /* Truncate the result of the floating point optab to an integer
2644 via expand_fix (). */
2645 target = gen_reg_rtx (mode);
2646 expand_fix (target, tmp, 0);
2648 return target;
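/* Illustrative sketch: even without an lfloor/lceil optab or a real C99
   floor declaration, the expansion above amounts to the hypothetical
   helper below (a stand-alone build would assume <math.h>).  */

static long
sketch_lfloor (double x)
{
  /* The truncating conversion is what expand_fix performs on RTL.  */
  return (long) floor (x);
}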
2651 /* Expand a call to one of the builtin math functions doing integer
2652 conversion (lrint).
2653 Return 0 if a normal call should be emitted rather than expanding the
2654 function in-line. EXP is the expression that is a call to the builtin
2655 function; if convenient, the result should be placed in TARGET. */
2657 static rtx
2658 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2660 convert_optab builtin_optab;
2661 rtx op0, insns;
2662 tree fndecl = get_callee_fndecl (exp);
2663 tree arg;
2664 enum machine_mode mode;
2666 /* There's no easy way to detect the case we need to set EDOM. */
2667 if (flag_errno_math)
2668 return NULL_RTX;
2670 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2671 gcc_unreachable ();
2673 arg = CALL_EXPR_ARG (exp, 0);
2675 switch (DECL_FUNCTION_CODE (fndecl))
2677 CASE_FLT_FN (BUILT_IN_LRINT):
2678 CASE_FLT_FN (BUILT_IN_LLRINT):
2679 builtin_optab = lrint_optab; break;
2680 CASE_FLT_FN (BUILT_IN_LROUND):
2681 CASE_FLT_FN (BUILT_IN_LLROUND):
2682 builtin_optab = lround_optab; break;
2683 default:
2684 gcc_unreachable ();
2687 /* Make a suitable register to place result in. */
2688 mode = TYPE_MODE (TREE_TYPE (exp));
2690 target = gen_reg_rtx (mode);
2692 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2693 need to expand the argument again. This way, we will not perform
2694 side-effects more than once. */
2695 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2697 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2699 start_sequence ();
2701 if (expand_sfix_optab (target, op0, builtin_optab))
2703 /* Output the entire sequence. */
2704 insns = get_insns ();
2705 end_sequence ();
2706 emit_insn (insns);
2707 return target;
2710 /* If we were unable to expand via the builtin, stop the sequence
2711 (without outputting the insns) and call the library function
2712 with the stabilized argument list. */
2713 end_sequence ();
2715 target = expand_call (exp, target, target == const0_rtx);
2717 return target;
2720 /* To evaluate powi(x,n), the floating point value x raised to the
2721 constant integer exponent n, we use a hybrid algorithm that
2722 combines the "window method" with look-up tables. For an
2723 introduction to exponentiation algorithms and "addition chains",
2724 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2725 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2726 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2727 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2729 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2730 multiplications to inline before calling the system library's pow
2731 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2732 so this default never requires calling pow, powf or powl. */
2734 #ifndef POWI_MAX_MULTS
2735 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2736 #endif
2738 /* The size of the "optimal power tree" lookup table. All
2739 exponents less than this value are simply looked up in the
2740 powi_table below. This threshold is also used to size the
2741 cache of pseudo registers that hold intermediate results. */
2742 #define POWI_TABLE_SIZE 256
2744 /* The size, in bits of the window, used in the "window method"
2745 exponentiation algorithm. This is equivalent to a radix of
2746 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2747 #define POWI_WINDOW_SIZE 3
2749 /* The following table is an efficient representation of an
2750 "optimal power tree". For each value, i, the corresponding
2751 value, j, in the table states that an optimal evaluation
2752 sequence for calculating pow(x,i) can be found by evaluating
2753 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2754 100 integers is given in Knuth's "Seminumerical algorithms". */
2756 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2758 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2759 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2760 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2761 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2762 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2763 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2764 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2765 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2766 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2767 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2768 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2769 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2770 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2771 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2772 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2773 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2774 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2775 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2776 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2777 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2778 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2779 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2780 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2781 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2782 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2783 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2784 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2785 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2786 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2787 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2788 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2789 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
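/* Worked example: powi_table[5] is 3, so x**5 is evaluated as
   x**3 * x**2.  Recursively, powi_table[3] is 2 and powi_table[2] is 1,
   giving the chain x**2 = x*x, x**3 = x**2 * x, x**5 = x**3 * x**2,
   i.e. three multiplications in total (powi_cost (5) == 3).  */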
2793 /* Return the number of multiplications required to calculate
2794 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2795 subroutine of powi_cost. CACHE is an array indicating
2796 which exponents have already been calculated. */
2798 static int
2799 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2801 /* If we've already calculated this exponent, then this evaluation
2802 doesn't require any additional multiplications. */
2803 if (cache[n])
2804 return 0;
2806 cache[n] = true;
2807 return powi_lookup_cost (n - powi_table[n], cache)
2808 + powi_lookup_cost (powi_table[n], cache) + 1;
2811 /* Return the number of multiplications required to calculate
2812 powi(x,n) for an arbitrary x, given the exponent N. This
2813 function needs to be kept in sync with expand_powi below. */
2815 static int
2816 powi_cost (HOST_WIDE_INT n)
2818 bool cache[POWI_TABLE_SIZE];
2819 unsigned HOST_WIDE_INT digit;
2820 unsigned HOST_WIDE_INT val;
2821 int result;
2823 if (n == 0)
2824 return 0;
2826 /* Ignore the reciprocal when calculating the cost. */
2827 val = (n < 0) ? -n : n;
2829 /* Initialize the exponent cache. */
2830 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2831 cache[1] = true;
2833 result = 0;
2835 while (val >= POWI_TABLE_SIZE)
2837 if (val & 1)
2839 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2840 result += powi_lookup_cost (digit, cache)
2841 + POWI_WINDOW_SIZE + 1;
2842 val >>= POWI_WINDOW_SIZE;
2844 else
2846 val >>= 1;
2847 result++;
2851 return result + powi_lookup_cost (val, cache);
2854 /* Recursive subroutine of expand_powi. This function takes the array,
2855 CACHE, of already calculated exponents and an exponent N and returns
2856 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2858 static rtx
2859 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2861 unsigned HOST_WIDE_INT digit;
2862 rtx target, result;
2863 rtx op0, op1;
2865 if (n < POWI_TABLE_SIZE)
2867 if (cache[n])
2868 return cache[n];
2870 target = gen_reg_rtx (mode);
2871 cache[n] = target;
2873 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2874 op1 = expand_powi_1 (mode, powi_table[n], cache);
2876 else if (n & 1)
2878 target = gen_reg_rtx (mode);
2879 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2880 op0 = expand_powi_1 (mode, n - digit, cache);
2881 op1 = expand_powi_1 (mode, digit, cache);
2883 else
2885 target = gen_reg_rtx (mode);
2886 op0 = expand_powi_1 (mode, n >> 1, cache);
2887 op1 = op0;
2890 result = expand_mult (mode, op0, op1, target, 0);
2891 if (result != target)
2892 emit_move_insn (target, result);
2893 return target;
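/* A minimal host-side sketch of the same recursion (hypothetical helper,
   not used by the compiler): it computes the numeric value x**n directly
   instead of emitting RTL, consulting powi_table exactly as expand_powi_1
   does, but without the memoizing cache.  */

static double
sketch_powi_value (double x, unsigned HOST_WIDE_INT n)
{
  double op0, op1;

  if (n == 0)
    return 1.0;
  if (n == 1)
    return x;

  if (n < POWI_TABLE_SIZE)
    {
      /* Split according to the optimal power tree.  */
      op0 = sketch_powi_value (x, n - powi_table[n]);
      op1 = sketch_powi_value (x, powi_table[n]);
    }
  else if (n & 1)
    {
      /* Peel off the low POWI_WINDOW_SIZE bits (the "window").  */
      unsigned HOST_WIDE_INT digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = sketch_powi_value (x, n - digit);
      op1 = sketch_powi_value (x, digit);
    }
  else
    {
      /* Even exponent: square the half power.  */
      op0 = sketch_powi_value (x, n >> 1);
      op1 = op0;
    }

  return op0 * op1;
}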
2896 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2897 floating point operand in mode MODE, and N is the exponent. This
2898 function needs to be kept in sync with powi_cost above. */
2900 static rtx
2901 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2903 unsigned HOST_WIDE_INT val;
2904 rtx cache[POWI_TABLE_SIZE];
2905 rtx result;
2907 if (n == 0)
2908 return CONST1_RTX (mode);
2910 val = (n < 0) ? -n : n;
2912 memset (cache, 0, sizeof (cache));
2913 cache[1] = x;
2915 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2917 /* If the original exponent was negative, reciprocate the result. */
2918 if (n < 0)
2919 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2920 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2922 return result;
2925 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2926 a normal call should be emitted rather than expanding the function
2927 in-line. EXP is the expression that is a call to the builtin
2928 function; if convenient, the result should be placed in TARGET. */
2930 static rtx
2931 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2933 tree arg0, arg1;
2934 tree fn, narg0;
2935 tree type = TREE_TYPE (exp);
2936 REAL_VALUE_TYPE cint, c, c2;
2937 HOST_WIDE_INT n;
2938 rtx op, op2;
2939 enum machine_mode mode = TYPE_MODE (type);
2941 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2942 return NULL_RTX;
2944 arg0 = CALL_EXPR_ARG (exp, 0);
2945 arg1 = CALL_EXPR_ARG (exp, 1);
2947 if (TREE_CODE (arg1) != REAL_CST
2948 || TREE_OVERFLOW (arg1))
2949 return expand_builtin_mathfn_2 (exp, target, subtarget);
2951 /* Handle constant exponents. */
2953 /* For integer valued exponents we can expand to an optimal multiplication
2954 sequence using expand_powi. */
2955 c = TREE_REAL_CST (arg1);
2956 n = real_to_integer (&c);
2957 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2958 if (real_identical (&c, &cint)
2959 && ((n >= -1 && n <= 2)
2960 || (flag_unsafe_math_optimizations
2961 && optimize_insn_for_speed_p ()
2962 && powi_cost (n) <= POWI_MAX_MULTS)))
2964 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2965 if (n != 1)
2967 op = force_reg (mode, op);
2968 op = expand_powi (op, mode, n);
2970 return op;
2973 narg0 = builtin_save_expr (arg0);
2975 /* If the exponent is not integer valued, check if it is half of an integer.
2976 In this case we can expand to sqrt (x) * x**(n/2). */
2977 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2978 if (fn != NULL_TREE)
2980 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2981 n = real_to_integer (&c2);
2982 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2983 if (real_identical (&c2, &cint)
2984 && ((flag_unsafe_math_optimizations
2985 && optimize_insn_for_speed_p ()
2986 && powi_cost (n/2) <= POWI_MAX_MULTS)
2987 || n == 1))
2989 tree call_expr = build_call_expr (fn, 1, narg0);
2990 /* Use expand_expr in case the newly built call expression
2991 was folded to a non-call. */
2992 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2993 if (n != 1)
2995 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2996 op2 = force_reg (mode, op2);
2997 op2 = expand_powi (op2, mode, abs (n / 2));
2998 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2999 0, OPTAB_LIB_WIDEN);
3000 /* If the original exponent was negative, reciprocate the
3001 result. */
3002 if (n < 0)
3003 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3004 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3006 return op;
3010 /* Check whether the exponent is a third of an integer. In this case
3011 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3012 different from pow (x, 1./3.) due to rounding and behavior
3013 with negative x, we need to constrain this transformation to
3014 unsafe math and positive x or finite math. */
3015 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3016 if (fn != NULL_TREE
3017 && flag_unsafe_math_optimizations
3018 && (tree_expr_nonnegative_p (arg0)
3019 || !HONOR_NANS (mode)))
3021 REAL_VALUE_TYPE dconst3;
3022 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3023 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3024 real_round (&c2, mode, &c2);
3025 n = real_to_integer (&c2);
3026 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3027 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3028 real_convert (&c2, mode, &c2);
3029 if (real_identical (&c2, &c)
3030 && ((optimize_insn_for_speed_p ()
3031 && powi_cost (n/3) <= POWI_MAX_MULTS)
3032 || n == 1))
3034 tree call_expr = build_call_expr (fn, 1, narg0);
3035 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3036 if (abs (n) % 3 == 2)
3037 op = expand_simple_binop (mode, MULT, op, op, op,
3038 0, OPTAB_LIB_WIDEN);
3039 if (n != 1)
3041 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3042 op2 = force_reg (mode, op2);
3043 op2 = expand_powi (op2, mode, abs (n / 3));
3044 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3045 0, OPTAB_LIB_WIDEN);
3046 /* If the original exponent was negative, reciprocate the
3047 result. */
3048 if (n < 0)
3049 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3050 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3052 return op;
3056 /* Fall back to optab expansion. */
3057 return expand_builtin_mathfn_2 (exp, target, subtarget);
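/* Worked examples of the constant-exponent cases handled above
   (illustrative):

     pow (x, 2.0)    ->  x*x                (integer exponent, expand_powi)
     pow (x, 0.5)    ->  sqrt (x)
     pow (x, 3.5)    ->  sqrt (x) * x*x*x   (half of an integer, n == 7,
                                             needs -funsafe-math-optimizations)
     pow (x, 4./3.)  ->  x * cbrt (x)       (third of an integer, n == 4,
                                             needs -funsafe-math-optimizations)

   Anything else falls back to the pow optab or a library call via
   expand_builtin_mathfn_2.  */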
3060 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3061 a normal call should be emitted rather than expanding the function
3062 in-line. EXP is the expression that is a call to the builtin
3063 function; if convenient, the result should be placed in TARGET. */
3065 static rtx
3066 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3068 tree arg0, arg1;
3069 rtx op0, op1;
3070 enum machine_mode mode;
3071 enum machine_mode mode2;
3073 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3074 return NULL_RTX;
3076 arg0 = CALL_EXPR_ARG (exp, 0);
3077 arg1 = CALL_EXPR_ARG (exp, 1);
3078 mode = TYPE_MODE (TREE_TYPE (exp));
3080 /* Handle constant power. */
3082 if (TREE_CODE (arg1) == INTEGER_CST
3083 && !TREE_OVERFLOW (arg1))
3085 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3087 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3088 Otherwise, check the number of multiplications required. */
3089 if ((TREE_INT_CST_HIGH (arg1) == 0
3090 || TREE_INT_CST_HIGH (arg1) == -1)
3091 && ((n >= -1 && n <= 2)
3092 || (optimize_insn_for_speed_p ()
3093 && powi_cost (n) <= POWI_MAX_MULTS)))
3095 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3096 op0 = force_reg (mode, op0);
3097 return expand_powi (op0, mode, n);
3101 /* Emit a libcall to libgcc. */
3103 /* Mode of the 2nd argument must match that of an int. */
3104 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3106 if (target == NULL_RTX)
3107 target = gen_reg_rtx (mode);
3109 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3110 if (GET_MODE (op0) != mode)
3111 op0 = convert_to_mode (mode, op0, 0);
3112 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3113 if (GET_MODE (op1) != mode2)
3114 op1 = convert_to_mode (mode2, op1, 0);
3116 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3117 target, LCT_CONST, mode, 2,
3118 op0, mode, op1, mode2);
3120 return target;
3123 /* Expand expression EXP which is a call to the strlen builtin. Return
3124 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3125 try to get the result in TARGET, if convenient. */
3127 static rtx
3128 expand_builtin_strlen (tree exp, rtx target,
3129 enum machine_mode target_mode)
3131 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3132 return NULL_RTX;
3133 else
3135 rtx pat;
3136 tree len;
3137 tree src = CALL_EXPR_ARG (exp, 0);
3138 rtx result, src_reg, char_rtx, before_strlen;
3139 enum machine_mode insn_mode = target_mode, char_mode;
3140 enum insn_code icode = CODE_FOR_nothing;
3141 int align;
3143 /* If the length can be computed at compile-time, return it. */
3144 len = c_strlen (src, 0);
3145 if (len)
3146 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3148 /* If the length can be computed at compile-time and is a constant
3149 integer, but there are side-effects in src, evaluate
3150 src for side-effects, then return len.
3151 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3152 can be optimized into: i++; x = 3; */
3153 len = c_strlen (src, 1);
3154 if (len && TREE_CODE (len) == INTEGER_CST)
3156 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3157 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3160 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3162 /* If SRC is not a pointer type, don't do this operation inline. */
3163 if (align == 0)
3164 return NULL_RTX;
3166 /* Bail out if we can't compute strlen in the right mode. */
3167 while (insn_mode != VOIDmode)
3169 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3170 if (icode != CODE_FOR_nothing)
3171 break;
3173 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3175 if (insn_mode == VOIDmode)
3176 return NULL_RTX;
3178 /* Make a place to write the result of the instruction. */
3179 result = target;
3180 if (! (result != 0
3181 && REG_P (result)
3182 && GET_MODE (result) == insn_mode
3183 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3184 result = gen_reg_rtx (insn_mode);
3186 /* Make a place to hold the source address. We will not expand
3187 the actual source until we are sure that the expansion will
3188 not fail -- there are trees that cannot be expanded twice. */
3189 src_reg = gen_reg_rtx (Pmode);
3191 /* Mark the beginning of the strlen sequence so we can emit the
3192 source operand later. */
3193 before_strlen = get_last_insn ();
3195 char_rtx = const0_rtx;
3196 char_mode = insn_data[(int) icode].operand[2].mode;
3197 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3198 char_mode))
3199 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3201 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3202 char_rtx, GEN_INT (align));
3203 if (! pat)
3204 return NULL_RTX;
3205 emit_insn (pat);
3207 /* Now that we are assured of success, expand the source. */
3208 start_sequence ();
3209 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3210 if (pat != src_reg)
3211 emit_move_insn (src_reg, pat);
3212 pat = get_insns ();
3213 end_sequence ();
3215 if (before_strlen)
3216 emit_insn_after (pat, before_strlen);
3217 else
3218 emit_insn_before (pat, get_insns ());
3220 /* Return the value in the proper mode for this function. */
3221 if (GET_MODE (result) == target_mode)
3222 target = result;
3223 else if (target != 0)
3224 convert_move (target, result, 0);
3225 else
3226 target = convert_to_mode (target_mode, result, 0);
3228 return target;
3232 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed; the
3233 caller should emit a normal call, otherwise try to get the result
3234 in TARGET, if convenient (and in mode MODE if that's convenient). */
3236 static rtx
3237 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3239 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3241 tree type = TREE_TYPE (exp);
3242 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3243 CALL_EXPR_ARG (exp, 1), type);
3244 if (result)
3245 return expand_expr (result, target, mode, EXPAND_NORMAL);
3247 return NULL_RTX;
3250 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3251 caller should emit a normal call, otherwise try to get the result
3252 in TARGET, if convenient (and in mode MODE if that's convenient). */
3254 static rtx
3255 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3257 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3259 tree type = TREE_TYPE (exp);
3260 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3261 CALL_EXPR_ARG (exp, 1), type);
3262 if (result)
3263 return expand_expr (result, target, mode, EXPAND_NORMAL);
3265 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3267 return NULL_RTX;
3270 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3271 caller should emit a normal call, otherwise try to get the result
3272 in TARGET, if convenient (and in mode MODE if that's convenient). */
3274 static rtx
3275 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3277 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3279 tree type = TREE_TYPE (exp);
3280 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3281 CALL_EXPR_ARG (exp, 1), type);
3282 if (result)
3283 return expand_expr (result, target, mode, EXPAND_NORMAL);
3285 return NULL_RTX;
3288 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3289 caller should emit a normal call, otherwise try to get the result
3290 in TARGET, if convenient (and in mode MODE if that's convenient). */
3292 static rtx
3293 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3295 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3297 tree type = TREE_TYPE (exp);
3298 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3299 CALL_EXPR_ARG (exp, 1), type);
3300 if (result)
3301 return expand_expr (result, target, mode, EXPAND_NORMAL);
3303 return NULL_RTX;
3306 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3307 bytes from constant string DATA + OFFSET and return it as target
3308 constant. */
3310 static rtx
3311 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3312 enum machine_mode mode)
3314 const char *str = (const char *) data;
3316 gcc_assert (offset >= 0
3317 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3318 <= strlen (str) + 1));
3320 return c_readstr (str + offset, mode);
3323 /* Expand a call EXP to the memcpy builtin.
3324 Return NULL_RTX if we failed; the caller should emit a normal call,
3325 otherwise try to get the result in TARGET, if convenient (and in
3326 mode MODE if that's convenient). */
3328 static rtx
3329 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3331 tree fndecl = get_callee_fndecl (exp);
3333 if (!validate_arglist (exp,
3334 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3335 return NULL_RTX;
3336 else
3338 tree dest = CALL_EXPR_ARG (exp, 0);
3339 tree src = CALL_EXPR_ARG (exp, 1);
3340 tree len = CALL_EXPR_ARG (exp, 2);
3341 const char *src_str;
3342 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3343 unsigned int dest_align
3344 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3345 rtx dest_mem, src_mem, dest_addr, len_rtx;
3346 tree result = fold_builtin_memory_op (dest, src, len,
3347 TREE_TYPE (TREE_TYPE (fndecl)),
3348 false, /*endp=*/0);
3349 HOST_WIDE_INT expected_size = -1;
3350 unsigned int expected_align = 0;
3351 tree_ann_common_t ann;
3353 if (result)
3355 while (TREE_CODE (result) == COMPOUND_EXPR)
3357 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3358 EXPAND_NORMAL);
3359 result = TREE_OPERAND (result, 1);
3361 return expand_expr (result, target, mode, EXPAND_NORMAL);
3364 /* If DEST is not a pointer type, call the normal function. */
3365 if (dest_align == 0)
3366 return NULL_RTX;
3368 /* If SRC is not a pointer type, don't do this
3369 operation in-line. */
3370 if (src_align == 0)
3371 return NULL_RTX;
3373 ann = tree_common_ann (exp);
3374 if (ann)
3375 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3377 if (expected_align < dest_align)
3378 expected_align = dest_align;
3379 dest_mem = get_memory_rtx (dest, len);
3380 set_mem_align (dest_mem, dest_align);
3381 len_rtx = expand_normal (len);
3382 src_str = c_getstr (src);
3384 /* If SRC is a string constant and block move would be done
3385 by pieces, we can avoid loading the string from memory
3386 and only store the computed constants. */
3387 if (src_str
3388 && GET_CODE (len_rtx) == CONST_INT
3389 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3390 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3391 CONST_CAST (char *, src_str),
3392 dest_align, false))
3394 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3395 builtin_memcpy_read_str,
3396 CONST_CAST (char *, src_str),
3397 dest_align, false, 0);
3398 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3399 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3400 return dest_mem;
3403 src_mem = get_memory_rtx (src, len);
3404 set_mem_align (src_mem, src_align);
3406 /* Copy word part most expediently. */
3407 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3408 CALL_EXPR_TAILCALL (exp)
3409 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3410 expected_align, expected_size);
3412 if (dest_addr == 0)
3414 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3415 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3417 return dest_addr;
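/* A minimal stand-alone analogue (plain C, not the rtx/machine-mode API used
   above) of the store-by-pieces path: when SRC is a constant string and LEN is
   a small compile-time constant, the driver walks the destination in
   word-sized pieces and asks a read_str-style callback for the constant bytes
   at each offset, much as store_by_pieces does with builtin_memcpy_read_str.
   All names below are illustrative.  */

#include <stddef.h>
#include <string.h>

typedef unsigned long piece_t;  /* stands in for one machine mode */

static piece_t
read_str_piece (const char *str, size_t offset)
{
  piece_t piece = 0;
  memcpy (&piece, str + offset, sizeof piece);
  return piece;
}

static void
store_by_pieces_sketch (char *dest, const char *src, size_t len)
{
  size_t offset = 0;

  while (offset + sizeof (piece_t) <= len)
    {
      piece_t piece = read_str_piece (src, offset);
      memcpy (dest + offset, &piece, sizeof piece);  /* one constant store */
      offset += sizeof (piece_t);
    }
  while (offset < len)  /* remaining tail bytes */
    {
      dest[offset] = src[offset];
      offset++;
    }
}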
3421 /* Expand a call EXP to the mempcpy builtin.
3422 Return NULL_RTX if we failed; the caller should emit a normal call,
3423 otherwise try to get the result in TARGET, if convenient (and in
3424 mode MODE if that's convenient). If ENDP is 0 return the
3425 destination pointer, if ENDP is 1 return the end pointer ala
3426 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3427 stpcpy. */
3429 static rtx
3430 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3432 if (!validate_arglist (exp,
3433 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3434 return NULL_RTX;
3435 else
3437 tree dest = CALL_EXPR_ARG (exp, 0);
3438 tree src = CALL_EXPR_ARG (exp, 1);
3439 tree len = CALL_EXPR_ARG (exp, 2);
3440 return expand_builtin_mempcpy_args (dest, src, len,
3441 TREE_TYPE (exp),
3442 target, mode, /*endp=*/ 1);
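/* Stand-alone sketch (hosted C library; illustrative name) of the ENDP
   contract documented above: for a copy of N bytes from SRC to DEST,
   ENDP == 0 yields DEST (memcpy), ENDP == 1 yields DEST + N (mempcpy), and
   ENDP == 2 yields DEST + N - 1, the stpcpy-style pointer to the last byte
   written.  */

#include <string.h>

static void *
copy_with_endp (void *dest, const void *src, size_t n, int endp)
{
  memcpy (dest, src, n);
  if (endp == 0)
    return dest;                   /* memcpy result */
  if (endp == 1)
    return (char *) dest + n;      /* mempcpy result */
  return (char *) dest + n - 1;    /* stpcpy-style result */
}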
3446 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3447 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3448 so that this can also be called without constructing an actual CALL_EXPR.
3449 TYPE is the return type of the call. The other arguments and return value
3450 are the same as for expand_builtin_mempcpy. */
3452 static rtx
3453 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3454 rtx target, enum machine_mode mode, int endp)
3456 /* If return value is ignored, transform mempcpy into memcpy. */
3457 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3459 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3460 tree result = build_call_expr (fn, 3, dest, src, len);
3462 while (TREE_CODE (result) == COMPOUND_EXPR)
3464 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3465 EXPAND_NORMAL);
3466 result = TREE_OPERAND (result, 1);
3468 return expand_expr (result, target, mode, EXPAND_NORMAL);
3470 else
3472 const char *src_str;
3473 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3474 unsigned int dest_align
3475 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3476 rtx dest_mem, src_mem, len_rtx;
3477 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3479 if (result)
3481 while (TREE_CODE (result) == COMPOUND_EXPR)
3483 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3484 EXPAND_NORMAL);
3485 result = TREE_OPERAND (result, 1);
3487 return expand_expr (result, target, mode, EXPAND_NORMAL);
3490 /* If either SRC or DEST is not a pointer type, don't do this
3491 operation in-line. */
3492 if (dest_align == 0 || src_align == 0)
3493 return NULL_RTX;
3495 /* If LEN is not constant, call the normal function. */
3496 if (! host_integerp (len, 1))
3497 return NULL_RTX;
3499 len_rtx = expand_normal (len);
3500 src_str = c_getstr (src);
3502 /* If SRC is a string constant and block move would be done
3503 by pieces, we can avoid loading the string from memory
3504 and only store the computed constants. */
3505 if (src_str
3506 && GET_CODE (len_rtx) == CONST_INT
3507 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3508 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3509 CONST_CAST (char *, src_str),
3510 dest_align, false))
3512 dest_mem = get_memory_rtx (dest, len);
3513 set_mem_align (dest_mem, dest_align);
3514 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3515 builtin_memcpy_read_str,
3516 CONST_CAST (char *, src_str),
3517 dest_align, false, endp);
3518 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3519 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3520 return dest_mem;
3523 if (GET_CODE (len_rtx) == CONST_INT
3524 && can_move_by_pieces (INTVAL (len_rtx),
3525 MIN (dest_align, src_align)))
3527 dest_mem = get_memory_rtx (dest, len);
3528 set_mem_align (dest_mem, dest_align);
3529 src_mem = get_memory_rtx (src, len);
3530 set_mem_align (src_mem, src_align);
3531 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3532 MIN (dest_align, src_align), endp);
3533 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3534 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3535 return dest_mem;
3538 return NULL_RTX;
3542 /* Expand expression EXP, which is a call to the memmove builtin. Return
3543 NULL_RTX if we failed; the caller should emit a normal call. */
3545 static rtx
3546 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3548 if (!validate_arglist (exp,
3549 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3550 return NULL_RTX;
3551 else
3553 tree dest = CALL_EXPR_ARG (exp, 0);
3554 tree src = CALL_EXPR_ARG (exp, 1);
3555 tree len = CALL_EXPR_ARG (exp, 2);
3556 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3557 target, mode, ignore);
3561 /* Helper function to do the actual work for expand_builtin_memmove. The
3562 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3563 so that this can also be called without constructing an actual CALL_EXPR.
3564 TYPE is the return type of the call. The other arguments and return value
3565 are the same as for expand_builtin_memmove. */
3567 static rtx
3568 expand_builtin_memmove_args (tree dest, tree src, tree len,
3569 tree type, rtx target, enum machine_mode mode,
3570 int ignore)
3572 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3574 if (result)
3576 STRIP_TYPE_NOPS (result);
3577 while (TREE_CODE (result) == COMPOUND_EXPR)
3579 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3580 EXPAND_NORMAL);
3581 result = TREE_OPERAND (result, 1);
3583 return expand_expr (result, target, mode, EXPAND_NORMAL);
3586 /* Otherwise, call the normal function. */
3587 return NULL_RTX;
3590 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3591 NULL_RTX if we failed; the caller should emit a normal call. */
3593 static rtx
3594 expand_builtin_bcopy (tree exp, int ignore)
3596 tree type = TREE_TYPE (exp);
3597 tree src, dest, size;
3599 if (!validate_arglist (exp,
3600 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3601 return NULL_RTX;
3603 src = CALL_EXPR_ARG (exp, 0);
3604 dest = CALL_EXPR_ARG (exp, 1);
3605 size = CALL_EXPR_ARG (exp, 2);
3607 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3608 This is done this way so that if it isn't expanded inline, we fall
3609 back to calling bcopy instead of memmove. */
3610 return expand_builtin_memmove_args (dest, src,
3611 fold_convert (sizetype, size),
3612 type, const0_rtx, VOIDmode,
3613 ignore);
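/* Source-level view of the transformation above, as a stand-alone sketch
   (hosted C library; bcopy_sketch is an illustrative name): bcopy takes its
   pointer arguments in the opposite order from memmove and discards the
   result.  */

#include <string.h>

static void
bcopy_sketch (const void *src, void *dest, size_t size)
{
  memmove (dest, src, size);  /* bcopy (src, dest, size) */
}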
3616 #ifndef HAVE_movstr
3617 # define HAVE_movstr 0
3618 # define CODE_FOR_movstr CODE_FOR_nothing
3619 #endif
3621 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3622 we failed; the caller should emit a normal call, otherwise try to
3623 get the result in TARGET, if convenient. If ENDP is 0 return the
3624 destination pointer, if ENDP is 1 return the end pointer ala
3625 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3626 stpcpy. */
3628 static rtx
3629 expand_movstr (tree dest, tree src, rtx target, int endp)
3631 rtx end;
3632 rtx dest_mem;
3633 rtx src_mem;
3634 rtx insn;
3635 const struct insn_data * data;
3637 if (!HAVE_movstr)
3638 return NULL_RTX;
3640 dest_mem = get_memory_rtx (dest, NULL);
3641 src_mem = get_memory_rtx (src, NULL);
3642 if (!endp)
3644 target = force_reg (Pmode, XEXP (dest_mem, 0));
3645 dest_mem = replace_equiv_address (dest_mem, target);
3646 end = gen_reg_rtx (Pmode);
3648 else
3650 if (target == 0 || target == const0_rtx)
3652 end = gen_reg_rtx (Pmode);
3653 if (target == 0)
3654 target = end;
3656 else
3657 end = target;
3660 data = insn_data + CODE_FOR_movstr;
3662 if (data->operand[0].mode != VOIDmode)
3663 end = gen_lowpart (data->operand[0].mode, end);
3665 insn = data->genfun (end, dest_mem, src_mem);
3667 gcc_assert (insn);
3669 emit_insn (insn);
3671 /* movstr is supposed to set end to the address of the NUL
3672 terminator. If the caller requested a mempcpy-like return value,
3673 adjust it. */
3674 if (endp == 1 && target != const0_rtx)
3676 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3677 emit_move_insn (target, force_operand (tem, NULL_RTX));
3680 return target;
3683 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3684 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3685 try to get the result in TARGET, if convenient (and in mode MODE if that's
3686 convenient). */
3688 static rtx
3689 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3691 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3693 tree dest = CALL_EXPR_ARG (exp, 0);
3694 tree src = CALL_EXPR_ARG (exp, 1);
3695 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3697 return NULL_RTX;
3700 /* Helper function to do the actual work for expand_builtin_strcpy. The
3701 arguments to the builtin_strcpy call DEST and SRC are broken out
3702 so that this can also be called without constructing an actual CALL_EXPR.
3703 The other arguments and return value are the same as for
3704 expand_builtin_strcpy. */
3706 static rtx
3707 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3708 rtx target, enum machine_mode mode)
3710 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3711 if (result)
3712 return expand_expr (result, target, mode, EXPAND_NORMAL);
3713 return expand_movstr (dest, src, target, /*endp=*/0);
3717 /* Expand a call EXP to the stpcpy builtin.
3718 Return NULL_RTX if we failed; the caller should emit a normal call,
3719 otherwise try to get the result in TARGET, if convenient (and in
3720 mode MODE if that's convenient). */
3722 static rtx
3723 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3725 tree dst, src;
3727 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3728 return NULL_RTX;
3730 dst = CALL_EXPR_ARG (exp, 0);
3731 src = CALL_EXPR_ARG (exp, 1);
3733 /* If return value is ignored, transform stpcpy into strcpy. */
3734 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3736 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3737 tree result = build_call_expr (fn, 2, dst, src);
3739 STRIP_NOPS (result);
3740 while (TREE_CODE (result) == COMPOUND_EXPR)
3742 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3743 EXPAND_NORMAL);
3744 result = TREE_OPERAND (result, 1);
3746 return expand_expr (result, target, mode, EXPAND_NORMAL);
3748 else
3750 tree len, lenp1;
3751 rtx ret;
3753 /* Ensure we get an actual string whose length can be evaluated at
3754 compile-time, not an expression containing a string. This is
3755 because the latter will potentially produce pessimized code
3756 when used to produce the return value. */
3757 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3758 return expand_movstr (dst, src, target, /*endp=*/2);
3760 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3761 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3762 target, mode, /*endp=*/2);
3764 if (ret)
3765 return ret;
3767 if (TREE_CODE (len) == INTEGER_CST)
3769 rtx len_rtx = expand_normal (len);
3771 if (GET_CODE (len_rtx) == CONST_INT)
3773 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3774 dst, src, target, mode);
3776 if (ret)
3778 if (! target)
3780 if (mode != VOIDmode)
3781 target = gen_reg_rtx (mode);
3782 else
3783 target = gen_reg_rtx (GET_MODE (ret));
3785 if (GET_MODE (target) != GET_MODE (ret))
3786 ret = gen_lowpart (GET_MODE (target), ret);
3788 ret = plus_constant (ret, INTVAL (len_rtx));
3789 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3790 gcc_assert (ret);
3792 return target;
3797 return expand_movstr (dst, src, target, /*endp=*/2);
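/* Stand-alone sketch (hosted C library; illustrative name) of the stpcpy
   contract relied on above: the result is DST + strlen (SRC), a pointer to
   the NUL terminator just stored, which is why the expansion uses
   strlen (SRC) + 1 as the mempcpy length and ENDP == 2.  */

#include <string.h>

static char *
stpcpy_sketch (char *dst, const char *src)
{
  size_t len = strlen (src);

  memcpy (dst, src, len + 1);  /* copy including the NUL */
  return dst + len;            /* points at the copied NUL */
}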
3801 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3802 bits from constant string DATA + OFFSET and return it as target
3803 constant. */
3805 static rtx
3806 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3807 enum machine_mode mode)
3809 const char *str = (const char *) data;
3811 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3812 return const0_rtx;
3814 return c_readstr (str + offset, mode);
3817 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3818 NULL_RTX if we failed; the caller should emit a normal call. */
3820 static rtx
3821 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3823 tree fndecl = get_callee_fndecl (exp);
3825 if (validate_arglist (exp,
3826 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3828 tree dest = CALL_EXPR_ARG (exp, 0);
3829 tree src = CALL_EXPR_ARG (exp, 1);
3830 tree len = CALL_EXPR_ARG (exp, 2);
3831 tree slen = c_strlen (src, 1);
3832 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3834 if (result)
3836 while (TREE_CODE (result) == COMPOUND_EXPR)
3838 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3839 EXPAND_NORMAL);
3840 result = TREE_OPERAND (result, 1);
3842 return expand_expr (result, target, mode, EXPAND_NORMAL);
3845 /* We must be passed a constant len and src parameter. */
3846 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3847 return NULL_RTX;
3849 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3851 /* We're required to pad with trailing zeros if the requested
3852 len is greater than strlen(s2)+1. In that case try to
3853 use store_by_pieces; if that fails, punt. */
3854 if (tree_int_cst_lt (slen, len))
3856 unsigned int dest_align
3857 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3858 const char *p = c_getstr (src);
3859 rtx dest_mem;
3861 if (!p || dest_align == 0 || !host_integerp (len, 1)
3862 || !can_store_by_pieces (tree_low_cst (len, 1),
3863 builtin_strncpy_read_str,
3864 CONST_CAST (char *, p),
3865 dest_align, false))
3866 return NULL_RTX;
3868 dest_mem = get_memory_rtx (dest, len);
3869 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3870 builtin_strncpy_read_str,
3871 CONST_CAST (char *, p), dest_align, false, 0);
3872 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3873 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3874 return dest_mem;
3877 return NULL_RTX;
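/* Stand-alone sketch (hosted C library; illustrative name) of the padding
   rule handled above: when LEN exceeds strlen (SRC) + 1, strncpy must fill
   the rest of DEST with zeros, so the constant-string case can be emitted as
   a single store_by_pieces covering all LEN bytes.  */

#include <string.h>

static char *
strncpy_sketch (char *dest, const char *src, size_t len)
{
  size_t slen = strlen (src);

  if (slen >= len)
    memcpy (dest, src, len);  /* no NUL is appended */
  else
    {
      memcpy (dest, src, slen + 1);
      memset (dest + slen + 1, 0, len - slen - 1);  /* trailing zeros */
    }
  return dest;
}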
3880 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3881 bits from constant string DATA + OFFSET and return it as target
3882 constant. */
3884 static rtx
3885 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3886 enum machine_mode mode)
3888 const char *c = (const char *) data;
3889 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3891 memset (p, *c, GET_MODE_SIZE (mode));
3893 return c_readstr (p, mode);
3896 /* Callback routine for store_by_pieces. Return the RTL of a register
3897 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3898 char value given in the RTL register data. For example, if mode is
3899 4 bytes wide, return the RTL for 0x01010101*data. */
3901 static rtx
3902 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3903 enum machine_mode mode)
3905 rtx target, coeff;
3906 size_t size;
3907 char *p;
3909 size = GET_MODE_SIZE (mode);
3910 if (size == 1)
3911 return (rtx) data;
3913 p = XALLOCAVEC (char, size);
3914 memset (p, 1, size);
3915 coeff = c_readstr (p, mode);
3917 target = convert_to_mode (mode, (rtx) data, 1);
3918 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3919 return force_reg (mode, target);
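/* Stand-alone sketch of the coefficient trick implemented above for a 4-byte
   mode: replicating an unsigned char across a word is a multiply by the
   constant whose bytes are all 1, i.e. 0x01010101 for 32 bits.  */

#include <stdint.h>

static uint32_t
memset_coeff_sketch (unsigned char c)
{
  return (uint32_t) c * 0x01010101u;  /* C copied into every byte */
}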
3922 /* Expand expression EXP, which is a call to the memset builtin. Return
3923 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3924 try to get the result in TARGET, if convenient (and in mode MODE if that's
3925 convenient). */
3927 static rtx
3928 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3930 if (!validate_arglist (exp,
3931 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3932 return NULL_RTX;
3933 else
3935 tree dest = CALL_EXPR_ARG (exp, 0);
3936 tree val = CALL_EXPR_ARG (exp, 1);
3937 tree len = CALL_EXPR_ARG (exp, 2);
3938 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3942 /* Helper function to do the actual work for expand_builtin_memset. The
3943 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3944 so that this can also be called without constructing an actual CALL_EXPR.
3945 The other arguments and return value are the same as for
3946 expand_builtin_memset. */
3948 static rtx
3949 expand_builtin_memset_args (tree dest, tree val, tree len,
3950 rtx target, enum machine_mode mode, tree orig_exp)
3952 tree fndecl, fn;
3953 enum built_in_function fcode;
3954 char c;
3955 unsigned int dest_align;
3956 rtx dest_mem, dest_addr, len_rtx;
3957 HOST_WIDE_INT expected_size = -1;
3958 unsigned int expected_align = 0;
3959 tree_ann_common_t ann;
3961 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3963 /* If DEST is not a pointer type, don't do this operation in-line. */
3964 if (dest_align == 0)
3965 return NULL_RTX;
3967 ann = tree_common_ann (orig_exp);
3968 if (ann)
3969 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3971 if (expected_align < dest_align)
3972 expected_align = dest_align;
3974 /* If the LEN parameter is zero, return DEST. */
3975 if (integer_zerop (len))
3977 /* Evaluate and ignore VAL in case it has side-effects. */
3978 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3979 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3982 /* Stabilize the arguments in case we fail. */
3983 dest = builtin_save_expr (dest);
3984 val = builtin_save_expr (val);
3985 len = builtin_save_expr (len);
3987 len_rtx = expand_normal (len);
3988 dest_mem = get_memory_rtx (dest, len);
3990 if (TREE_CODE (val) != INTEGER_CST)
3992 rtx val_rtx;
3994 val_rtx = expand_normal (val);
3995 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3996 val_rtx, 0);
3998 /* Assume that we can memset by pieces if we can store
3999 the coefficients by pieces (in the required modes).
4000 We can't pass builtin_memset_gen_str as that emits RTL. */
4001 c = 1;
4002 if (host_integerp (len, 1)
4003 && can_store_by_pieces (tree_low_cst (len, 1),
4004 builtin_memset_read_str, &c, dest_align,
4005 true))
4007 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4008 val_rtx);
4009 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4010 builtin_memset_gen_str, val_rtx, dest_align,
4011 true, 0);
4013 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4014 dest_align, expected_align,
4015 expected_size))
4016 goto do_libcall;
4018 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4019 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4020 return dest_mem;
4023 if (target_char_cast (val, &c))
4024 goto do_libcall;
4026 if (c)
4028 if (host_integerp (len, 1)
4029 && can_store_by_pieces (tree_low_cst (len, 1),
4030 builtin_memset_read_str, &c, dest_align,
4031 true))
4032 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4033 builtin_memset_read_str, &c, dest_align, true, 0);
4034 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4035 dest_align, expected_align,
4036 expected_size))
4037 goto do_libcall;
4039 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4040 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4041 return dest_mem;
4044 set_mem_align (dest_mem, dest_align);
4045 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4046 CALL_EXPR_TAILCALL (orig_exp)
4047 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4048 expected_align, expected_size);
4050 if (dest_addr == 0)
4052 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4053 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4056 return dest_addr;
4058 do_libcall:
4059 fndecl = get_callee_fndecl (orig_exp);
4060 fcode = DECL_FUNCTION_CODE (fndecl);
4061 if (fcode == BUILT_IN_MEMSET)
4062 fn = build_call_expr (fndecl, 3, dest, val, len);
4063 else if (fcode == BUILT_IN_BZERO)
4064 fn = build_call_expr (fndecl, 2, dest, len);
4065 else
4066 gcc_unreachable ();
4067 if (TREE_CODE (fn) == CALL_EXPR)
4068 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4069 return expand_call (fn, target, target == const0_rtx);
4072 /* Expand expression EXP, which is a call to the bzero builtin. Return
4073 NULL_RTX if we failed; the caller should emit a normal call. */
4075 static rtx
4076 expand_builtin_bzero (tree exp)
4078 tree dest, size;
4080 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4081 return NULL_RTX;
4083 dest = CALL_EXPR_ARG (exp, 0);
4084 size = CALL_EXPR_ARG (exp, 1);
4086 /* New argument list transforming bzero(ptr x, int y) to
4087 memset(ptr x, int 0, size_t y). This is done this way
4088 so that if it isn't expanded inline, we fall back to
4089 calling bzero instead of memset. */
4091 return expand_builtin_memset_args (dest, integer_zero_node,
4092 fold_convert (sizetype, size),
4093 const0_rtx, VOIDmode, exp);
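/* Source-level view of the transformation above (stand-alone sketch, hosted
   C library; illustrative name): bzero (x, y) acts as memset (x, 0, y) with
   the result ignored.  */

#include <string.h>

static void
bzero_sketch (void *x, size_t y)
{
  memset (x, 0, y);
}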
4096 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed; the
4097 caller should emit a normal call, otherwise try to get the result
4098 in TARGET, if convenient (and in mode MODE if that's convenient). */
4100 static rtx
4101 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4103 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4104 INTEGER_TYPE, VOID_TYPE))
4106 tree type = TREE_TYPE (exp);
4107 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4108 CALL_EXPR_ARG (exp, 1),
4109 CALL_EXPR_ARG (exp, 2), type);
4110 if (result)
4111 return expand_expr (result, target, mode, EXPAND_NORMAL);
4113 return NULL_RTX;
4116 /* Expand expression EXP, which is a call to the memcmp built-in function.
4117 Return NULL_RTX if we failed and the
4118 caller should emit a normal call, otherwise try to get the result in
4119 TARGET, if convenient (and in mode MODE, if that's convenient). */
4121 static rtx
4122 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4124 if (!validate_arglist (exp,
4125 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4126 return NULL_RTX;
4127 else
4129 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4130 CALL_EXPR_ARG (exp, 1),
4131 CALL_EXPR_ARG (exp, 2));
4132 if (result)
4133 return expand_expr (result, target, mode, EXPAND_NORMAL);
4136 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4138 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4139 rtx result;
4140 rtx insn;
4141 tree arg1 = CALL_EXPR_ARG (exp, 0);
4142 tree arg2 = CALL_EXPR_ARG (exp, 1);
4143 tree len = CALL_EXPR_ARG (exp, 2);
4145 int arg1_align
4146 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4147 int arg2_align
4148 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4149 enum machine_mode insn_mode;
4151 #ifdef HAVE_cmpmemsi
4152 if (HAVE_cmpmemsi)
4153 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4154 else
4155 #endif
4156 #ifdef HAVE_cmpstrnsi
4157 if (HAVE_cmpstrnsi)
4158 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4159 else
4160 #endif
4161 return NULL_RTX;
4163 /* If we don't have POINTER_TYPE, call the function. */
4164 if (arg1_align == 0 || arg2_align == 0)
4165 return NULL_RTX;
4167 /* Make a place to write the result of the instruction. */
4168 result = target;
4169 if (! (result != 0
4170 && REG_P (result) && GET_MODE (result) == insn_mode
4171 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4172 result = gen_reg_rtx (insn_mode);
4174 arg1_rtx = get_memory_rtx (arg1, len);
4175 arg2_rtx = get_memory_rtx (arg2, len);
4176 arg3_rtx = expand_normal (len);
4178 /* Set MEM_SIZE as appropriate. */
4179 if (GET_CODE (arg3_rtx) == CONST_INT)
4181 set_mem_size (arg1_rtx, arg3_rtx);
4182 set_mem_size (arg2_rtx, arg3_rtx);
4185 #ifdef HAVE_cmpmemsi
4186 if (HAVE_cmpmemsi)
4187 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4188 GEN_INT (MIN (arg1_align, arg2_align)));
4189 else
4190 #endif
4191 #ifdef HAVE_cmpstrnsi
4192 if (HAVE_cmpstrnsi)
4193 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4194 GEN_INT (MIN (arg1_align, arg2_align)));
4195 else
4196 #endif
4197 gcc_unreachable ();
4199 if (insn)
4200 emit_insn (insn);
4201 else
4202 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4203 TYPE_MODE (integer_type_node), 3,
4204 XEXP (arg1_rtx, 0), Pmode,
4205 XEXP (arg2_rtx, 0), Pmode,
4206 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4207 TYPE_UNSIGNED (sizetype)),
4208 TYPE_MODE (sizetype));
4210 /* Return the value in the proper mode for this function. */
4211 mode = TYPE_MODE (TREE_TYPE (exp));
4212 if (GET_MODE (result) == mode)
4213 return result;
4214 else if (target != 0)
4216 convert_move (target, result, 0);
4217 return target;
4219 else
4220 return convert_to_mode (mode, result, 0);
4222 #endif
4224 return NULL_RTX;
4227 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4228 if we failed; the caller should emit a normal call, otherwise try to get
4229 the result in TARGET, if convenient. */
4231 static rtx
4232 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4234 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4235 return NULL_RTX;
4236 else
4238 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4239 CALL_EXPR_ARG (exp, 1));
4240 if (result)
4241 return expand_expr (result, target, mode, EXPAND_NORMAL);
4244 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4245 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4246 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4248 rtx arg1_rtx, arg2_rtx;
4249 rtx result, insn = NULL_RTX;
4250 tree fndecl, fn;
4251 tree arg1 = CALL_EXPR_ARG (exp, 0);
4252 tree arg2 = CALL_EXPR_ARG (exp, 1);
4254 int arg1_align
4255 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4256 int arg2_align
4257 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4259 /* If we don't have POINTER_TYPE, call the function. */
4260 if (arg1_align == 0 || arg2_align == 0)
4261 return NULL_RTX;
4263 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4264 arg1 = builtin_save_expr (arg1);
4265 arg2 = builtin_save_expr (arg2);
4267 arg1_rtx = get_memory_rtx (arg1, NULL);
4268 arg2_rtx = get_memory_rtx (arg2, NULL);
4270 #ifdef HAVE_cmpstrsi
4271 /* Try to call cmpstrsi. */
4272 if (HAVE_cmpstrsi)
4274 enum machine_mode insn_mode
4275 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4277 /* Make a place to write the result of the instruction. */
4278 result = target;
4279 if (! (result != 0
4280 && REG_P (result) && GET_MODE (result) == insn_mode
4281 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4282 result = gen_reg_rtx (insn_mode);
4284 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4285 GEN_INT (MIN (arg1_align, arg2_align)));
4287 #endif
4288 #ifdef HAVE_cmpstrnsi
4289 /* Try to determine at least one length and call cmpstrnsi. */
4290 if (!insn && HAVE_cmpstrnsi)
4292 tree len;
4293 rtx arg3_rtx;
4295 enum machine_mode insn_mode
4296 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4297 tree len1 = c_strlen (arg1, 1);
4298 tree len2 = c_strlen (arg2, 1);
4300 if (len1)
4301 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4302 if (len2)
4303 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4305 /* If we don't have a constant length for the first, use the length
4306 of the second, if we know it. We don't require a constant for
4307 this case; some cost analysis could be done if both are available
4308 but neither is constant. For now, assume they're equally cheap,
4309 unless one has side effects. If both strings have constant lengths,
4310 use the smaller. */
4312 if (!len1)
4313 len = len2;
4314 else if (!len2)
4315 len = len1;
4316 else if (TREE_SIDE_EFFECTS (len1))
4317 len = len2;
4318 else if (TREE_SIDE_EFFECTS (len2))
4319 len = len1;
4320 else if (TREE_CODE (len1) != INTEGER_CST)
4321 len = len2;
4322 else if (TREE_CODE (len2) != INTEGER_CST)
4323 len = len1;
4324 else if (tree_int_cst_lt (len1, len2))
4325 len = len1;
4326 else
4327 len = len2;
4329 /* If both arguments have side effects, we cannot optimize. */
4330 if (!len || TREE_SIDE_EFFECTS (len))
4331 goto do_libcall;
4333 arg3_rtx = expand_normal (len);
4335 /* Make a place to write the result of the instruction. */
4336 result = target;
4337 if (! (result != 0
4338 && REG_P (result) && GET_MODE (result) == insn_mode
4339 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4340 result = gen_reg_rtx (insn_mode);
4342 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4343 GEN_INT (MIN (arg1_align, arg2_align)));
4345 #endif
4347 if (insn)
4349 emit_insn (insn);
4351 /* Return the value in the proper mode for this function. */
4352 mode = TYPE_MODE (TREE_TYPE (exp));
4353 if (GET_MODE (result) == mode)
4354 return result;
4355 if (target == 0)
4356 return convert_to_mode (mode, result, 0);
4357 convert_move (target, result, 0);
4358 return target;
4361 /* Expand the library call ourselves using a stabilized argument
4362 list to avoid evaluating the function's arguments twice. */
4363 #ifdef HAVE_cmpstrnsi
4364 do_libcall:
4365 #endif
4366 fndecl = get_callee_fndecl (exp);
4367 fn = build_call_expr (fndecl, 2, arg1, arg2);
4368 if (TREE_CODE (fn) == CALL_EXPR)
4369 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4370 return expand_call (fn, target, target == const0_rtx);
4372 #endif
4373 return NULL_RTX;
4376 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4377 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4378 the result in TARGET, if convenient. */
4380 static rtx
4381 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4383 if (!validate_arglist (exp,
4384 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4385 return NULL_RTX;
4386 else
4388 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4389 CALL_EXPR_ARG (exp, 1),
4390 CALL_EXPR_ARG (exp, 2));
4391 if (result)
4392 return expand_expr (result, target, mode, EXPAND_NORMAL);
4395 /* If c_strlen can determine an expression for one of the string
4396 lengths, and it doesn't have side effects, then emit cmpstrnsi
4397 using length MIN(strlen(string)+1, arg3). */
4398 #ifdef HAVE_cmpstrnsi
4399 if (HAVE_cmpstrnsi)
4401 tree len, len1, len2;
4402 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4403 rtx result, insn;
4404 tree fndecl, fn;
4405 tree arg1 = CALL_EXPR_ARG (exp, 0);
4406 tree arg2 = CALL_EXPR_ARG (exp, 1);
4407 tree arg3 = CALL_EXPR_ARG (exp, 2);
4409 int arg1_align
4410 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4411 int arg2_align
4412 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4413 enum machine_mode insn_mode
4414 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4416 len1 = c_strlen (arg1, 1);
4417 len2 = c_strlen (arg2, 1);
4419 if (len1)
4420 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4421 if (len2)
4422 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4424 /* If we don't have a constant length for the first, use the length
4425 of the second, if we know it. We don't require a constant for
4426 this case; some cost analysis could be done if both are available
4427 but neither is constant. For now, assume they're equally cheap,
4428 unless one has side effects. If both strings have constant lengths,
4429 use the smaller. */
4431 if (!len1)
4432 len = len2;
4433 else if (!len2)
4434 len = len1;
4435 else if (TREE_SIDE_EFFECTS (len1))
4436 len = len2;
4437 else if (TREE_SIDE_EFFECTS (len2))
4438 len = len1;
4439 else if (TREE_CODE (len1) != INTEGER_CST)
4440 len = len2;
4441 else if (TREE_CODE (len2) != INTEGER_CST)
4442 len = len1;
4443 else if (tree_int_cst_lt (len1, len2))
4444 len = len1;
4445 else
4446 len = len2;
4448 /* If both arguments have side effects, we cannot optimize. */
4449 if (!len || TREE_SIDE_EFFECTS (len))
4450 return NULL_RTX;
4452 /* The actual new length parameter is MIN(len,arg3). */
4453 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4454 fold_convert (TREE_TYPE (len), arg3));
4456 /* If we don't have POINTER_TYPE, call the function. */
4457 if (arg1_align == 0 || arg2_align == 0)
4458 return NULL_RTX;
4460 /* Make a place to write the result of the instruction. */
4461 result = target;
4462 if (! (result != 0
4463 && REG_P (result) && GET_MODE (result) == insn_mode
4464 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4465 result = gen_reg_rtx (insn_mode);
4467 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4468 arg1 = builtin_save_expr (arg1);
4469 arg2 = builtin_save_expr (arg2);
4470 len = builtin_save_expr (len);
4472 arg1_rtx = get_memory_rtx (arg1, len);
4473 arg2_rtx = get_memory_rtx (arg2, len);
4474 arg3_rtx = expand_normal (len);
4475 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4476 GEN_INT (MIN (arg1_align, arg2_align)));
4477 if (insn)
4479 emit_insn (insn);
4481 /* Return the value in the proper mode for this function. */
4482 mode = TYPE_MODE (TREE_TYPE (exp));
4483 if (GET_MODE (result) == mode)
4484 return result;
4485 if (target == 0)
4486 return convert_to_mode (mode, result, 0);
4487 convert_move (target, result, 0);
4488 return target;
4491 /* Expand the library call ourselves using a stabilized argument
4492 list to avoid evaluating the function's arguments twice. */
4493 fndecl = get_callee_fndecl (exp);
4494 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4495 if (TREE_CODE (fn) == CALL_EXPR)
4496 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4497 return expand_call (fn, target, target == const0_rtx);
4499 #endif
4500 return NULL_RTX;
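/* Stand-alone sketch (hosted C library; illustrative name) of the length
   chosen above: bytes past the first NUL of a string with known length
   cannot affect the result, so the comparison length may be clamped to
   MIN (strlen (s1) + 1, n).  As with strncmp itself, only the sign of the
   result is meaningful.  */

#include <string.h>

static int
strncmp_len_sketch (const char *s1, const char *s2, size_t n)
{
  size_t len = strlen (s1) + 1;        /* assume s1's length is known */
  size_t cmp_len = len < n ? len : n;

  return memcmp (s1, s2, cmp_len);     /* same sign as strncmp (s1, s2, n) */
}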
4503 /* Expand expression EXP, which is a call to the strcat builtin.
4504 Return NULL_RTX if we failed; the caller should emit a normal call,
4505 otherwise try to get the result in TARGET, if convenient. */
4507 static rtx
4508 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4510 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4511 return NULL_RTX;
4512 else
4514 tree dst = CALL_EXPR_ARG (exp, 0);
4515 tree src = CALL_EXPR_ARG (exp, 1);
4516 const char *p = c_getstr (src);
4518 /* If the string length is zero, return the dst parameter. */
4519 if (p && *p == '\0')
4520 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4522 if (optimize_insn_for_speed_p ())
4524 /* See if we can store by pieces into (dst + strlen(dst)). */
4525 tree newsrc, newdst,
4526 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4527 rtx insns;
4529 /* Stabilize the argument list. */
4530 newsrc = builtin_save_expr (src);
4531 dst = builtin_save_expr (dst);
4533 start_sequence ();
4535 /* Create strlen (dst). */
4536 newdst = build_call_expr (strlen_fn, 1, dst);
4537 /* Create (dst p+ strlen (dst)). */
4539 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4540 newdst = builtin_save_expr (newdst);
4542 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4544 end_sequence (); /* Stop sequence. */
4545 return NULL_RTX;
4548 /* Output the entire sequence. */
4549 insns = get_insns ();
4550 end_sequence ();
4551 emit_insn (insns);
4553 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4556 return NULL_RTX;
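/* Stand-alone sketch (hosted C library; illustrative name) of the rewrite
   attempted above when expanding for speed: strcat (DST, SRC) is a strcpy
   into DST + strlen (DST), returning the original DST.  */

#include <string.h>

static char *
strcat_sketch (char *dst, const char *src)
{
  strcpy (dst + strlen (dst), src);
  return dst;
}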
4560 /* Expand expression EXP, which is a call to the strncat builtin.
4561 Return NULL_RTX if we failed; the caller should emit a normal call,
4562 otherwise try to get the result in TARGET, if convenient. */
4564 static rtx
4565 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4567 if (validate_arglist (exp,
4568 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4570 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4571 CALL_EXPR_ARG (exp, 1),
4572 CALL_EXPR_ARG (exp, 2));
4573 if (result)
4574 return expand_expr (result, target, mode, EXPAND_NORMAL);
4576 return NULL_RTX;
4579 /* Expand expression EXP, which is a call to the strspn builtin.
4580 Return NULL_RTX if we failed; the caller should emit a normal call,
4581 otherwise try to get the result in TARGET, if convenient. */
4583 static rtx
4584 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4586 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4588 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4589 CALL_EXPR_ARG (exp, 1));
4590 if (result)
4591 return expand_expr (result, target, mode, EXPAND_NORMAL);
4593 return NULL_RTX;
4596 /* Expand expression EXP, which is a call to the strcspn builtin.
4597 Return NULL_RTX if we failed; the caller should emit a normal call,
4598 otherwise try to get the result in TARGET, if convenient. */
4600 static rtx
4601 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4603 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4605 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4606 CALL_EXPR_ARG (exp, 1));
4607 if (result)
4608 return expand_expr (result, target, mode, EXPAND_NORMAL);
4610 return NULL_RTX;
4613 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4614 if that's convenient. */
4616 rtx
4617 expand_builtin_saveregs (void)
4619 rtx val, seq;
4621 /* Don't do __builtin_saveregs more than once in a function.
4622 Save the result of the first call and reuse it. */
4623 if (saveregs_value != 0)
4624 return saveregs_value;
4626 /* When this function is called, it means that registers must be
4627 saved on entry to this function. So we migrate the call to the
4628 first insn of this function. */
4630 start_sequence ();
4632 /* Do whatever the machine needs done in this case. */
4633 val = targetm.calls.expand_builtin_saveregs ();
4635 seq = get_insns ();
4636 end_sequence ();
4638 saveregs_value = val;
4640 /* Put the insns after the NOTE that starts the function. If this
4641 is inside a start_sequence, make the outer-level insn chain current, so
4642 the code is placed at the start of the function. */
4643 push_topmost_sequence ();
4644 emit_insn_after (seq, entry_of_function ());
4645 pop_topmost_sequence ();
4647 return val;
4650 /* __builtin_args_info (N) returns word N of the arg space info
4651 for the current function. The number and meanings of words
4652 are controlled by the definition of CUMULATIVE_ARGS. */
4654 static rtx
4655 expand_builtin_args_info (tree exp)
4657 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4658 int *word_ptr = (int *) &crtl->args.info;
4660 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4662 if (call_expr_nargs (exp) != 0)
4664 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4665 error ("argument of %<__builtin_args_info%> must be constant");
4666 else
4668 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4670 if (wordnum < 0 || wordnum >= nwords)
4671 error ("argument of %<__builtin_args_info%> out of range");
4672 else
4673 return GEN_INT (word_ptr[wordnum]);
4676 else
4677 error ("missing argument in %<__builtin_args_info%>");
4679 return const0_rtx;
4682 /* Expand a call to __builtin_next_arg. */
4684 static rtx
4685 expand_builtin_next_arg (void)
4687 /* Checking arguments is already done in fold_builtin_next_arg
4688 that must be called before this function. */
4689 return expand_binop (ptr_mode, add_optab,
4690 crtl->args.internal_arg_pointer,
4691 crtl->args.arg_offset_rtx,
4692 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4695 /* Make it easier for the backends by protecting the valist argument
4696 from multiple evaluations. */
4698 static tree
4699 stabilize_va_list (tree valist, int needs_lvalue)
4701 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4703 gcc_assert (vatype != NULL_TREE);
4705 if (TREE_CODE (vatype) == ARRAY_TYPE)
4707 if (TREE_SIDE_EFFECTS (valist))
4708 valist = save_expr (valist);
4710 /* For this case, the backends will be expecting a pointer to
4711 vatype, but it's possible we've actually been given an array
4712 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4713 So fix it. */
4714 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4716 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4717 valist = build_fold_addr_expr_with_type (valist, p1);
4720 else
4722 tree pt;
4724 if (! needs_lvalue)
4726 if (! TREE_SIDE_EFFECTS (valist))
4727 return valist;
4729 pt = build_pointer_type (vatype);
4730 valist = fold_build1 (ADDR_EXPR, pt, valist);
4731 TREE_SIDE_EFFECTS (valist) = 1;
4734 if (TREE_SIDE_EFFECTS (valist))
4735 valist = save_expr (valist);
4736 valist = build_fold_indirect_ref (valist);
4739 return valist;
4742 /* The "standard" definition of va_list is void*. */
4744 tree
4745 std_build_builtin_va_list (void)
4747 return ptr_type_node;
4750 /* The "standard" abi va_list is va_list_type_node. */
4752 tree
4753 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4755 return va_list_type_node;
4758 /* The "standard" type of va_list is va_list_type_node. */
4760 tree
4761 std_canonical_va_list_type (tree type)
4763 tree wtype, htype;
4765 if (INDIRECT_REF_P (type))
4766 type = TREE_TYPE (type);
4767 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4768 type = TREE_TYPE (type);
4769 wtype = va_list_type_node;
4770 htype = type;
4771 /* Treat structure va_list types. */
4772 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4773 htype = TREE_TYPE (htype);
4774 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4776 /* If va_list is an array type, the argument may have decayed
4777 to a pointer type, e.g. by being passed to another function.
4778 In that case, unwrap both types so that we can compare the
4779 underlying records. */
4780 if (TREE_CODE (htype) == ARRAY_TYPE
4781 || POINTER_TYPE_P (htype))
4783 wtype = TREE_TYPE (wtype);
4784 htype = TREE_TYPE (htype);
4787 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4788 return va_list_type_node;
4790 return NULL_TREE;
4793 /* The "standard" implementation of va_start: just assign `nextarg' to
4794 the variable. */
4796 void
4797 std_expand_builtin_va_start (tree valist, rtx nextarg)
4799 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4800 convert_move (va_r, nextarg, 0);
4803 /* Expand EXP, a call to __builtin_va_start. */
4805 static rtx
4806 expand_builtin_va_start (tree exp)
4808 rtx nextarg;
4809 tree valist;
4811 if (call_expr_nargs (exp) < 2)
4813 error ("too few arguments to function %<va_start%>");
4814 return const0_rtx;
4817 if (fold_builtin_next_arg (exp, true))
4818 return const0_rtx;
4820 nextarg = expand_builtin_next_arg ();
4821 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4823 if (targetm.expand_builtin_va_start)
4824 targetm.expand_builtin_va_start (valist, nextarg);
4825 else
4826 std_expand_builtin_va_start (valist, nextarg);
4828 return const0_rtx;
4831 /* The "standard" implementation of va_arg: read the value from the
4832 current (padded) address and increment by the (padded) size. */
4834 tree
4835 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4836 gimple_seq *post_p)
4838 tree addr, t, type_size, rounded_size, valist_tmp;
4839 unsigned HOST_WIDE_INT align, boundary;
4840 bool indirect;
4842 #ifdef ARGS_GROW_DOWNWARD
4843 /* All of the alignment and movement below is for args-grow-up machines.
4844 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4845 implement their own specialized gimplify_va_arg_expr routines. */
4846 gcc_unreachable ();
4847 #endif
4849 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4850 if (indirect)
4851 type = build_pointer_type (type);
4853 align = PARM_BOUNDARY / BITS_PER_UNIT;
4854 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4856 /* When the caller aligns a parameter on the stack, an alignment beyond
4857 MAX_SUPPORTED_STACK_ALIGNMENT is reduced to
4858 MAX_SUPPORTED_STACK_ALIGNMENT. Match the callee's assumption here
4859 with the caller's behavior. */
4860 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4861 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4863 boundary /= BITS_PER_UNIT;
4865 /* Hoist the valist value into a temporary for the moment. */
4866 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4868 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4869 requires greater alignment, we must perform dynamic alignment. */
4870 if (boundary > align
4871 && !integer_zerop (TYPE_SIZE (type)))
4873 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4874 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4875 valist_tmp, size_int (boundary - 1)));
4876 gimplify_and_add (t, pre_p);
4878 t = fold_convert (sizetype, valist_tmp);
4879 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4880 fold_convert (TREE_TYPE (valist),
4881 fold_build2 (BIT_AND_EXPR, sizetype, t,
4882 size_int (-boundary))));
4883 gimplify_and_add (t, pre_p);
4885 else
4886 boundary = align;
4888 /* If the actual alignment is less than the alignment of the type,
4889 adjust the type accordingly so that we don't assume strict alignment
4890 when dereferencing the pointer. */
4891 boundary *= BITS_PER_UNIT;
4892 if (boundary < TYPE_ALIGN (type))
4894 type = build_variant_type_copy (type);
4895 TYPE_ALIGN (type) = boundary;
4898 /* Compute the rounded size of the type. */
4899 type_size = size_in_bytes (type);
4900 rounded_size = round_up (type_size, align);
4902 /* Reduce rounded_size so it's sharable with the postqueue. */
4903 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4905 /* Get AP. */
4906 addr = valist_tmp;
4907 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4909 /* Small args are padded downward. */
4910 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4911 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4912 size_binop (MINUS_EXPR, rounded_size, type_size));
4913 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4916 /* Compute new value for AP. */
4917 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4918 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4919 gimplify_and_add (t, pre_p);
4921 addr = fold_convert (build_pointer_type (type), addr);
4923 if (indirect)
4924 addr = build_va_arg_indirect_ref (addr);
4926 return build_va_arg_indirect_ref (addr);
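/* Stand-alone sketch of the address arithmetic gimplified above, for an
   args-grow-up target with a plain pointer va_list.  SIZE, ALIGN and BOUNDARY
   are in bytes and assumed to be powers of two, as the -boundary mask above
   also assumes; the PAD_VARARGS_DOWN adjustment is omitted for brevity.  All
   names are illustrative.  */

#include <stdint.h>
#include <stddef.h>

static void *
va_arg_sketch (char **ap, size_t size, size_t align, size_t boundary)
{
  uintptr_t addr = (uintptr_t) *ap;
  size_t rounded;

  if (boundary > align)
    addr = (addr + boundary - 1) & ~((uintptr_t) boundary - 1);  /* dynamic align */
  rounded = (size + align - 1) & ~((uintptr_t) align - 1);       /* rounded size */
  *ap = (char *) addr + rounded;  /* new value of AP */
  return (void *) addr;           /* where the argument lives */
}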
4929 /* Build an indirect-ref expression over the given TREE, which represents a
4930 piece of a va_arg() expansion. */
4931 tree
4932 build_va_arg_indirect_ref (tree addr)
4934 addr = build_fold_indirect_ref (addr);
4936 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4937 mf_mark (addr);
4939 return addr;
4942 /* Return a dummy expression of type TYPE in order to keep going after an
4943 error. */
4945 static tree
4946 dummy_object (tree type)
4948 tree t = build_int_cst (build_pointer_type (type), 0);
4949 return build1 (INDIRECT_REF, type, t);
4952 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4953 builtin function, but a very special sort of operator. */
4955 enum gimplify_status
4956 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4958 tree promoted_type, have_va_type;
4959 tree valist = TREE_OPERAND (*expr_p, 0);
4960 tree type = TREE_TYPE (*expr_p);
4961 tree t;
4963 /* Verify that valist is of the proper type. */
4964 have_va_type = TREE_TYPE (valist);
4965 if (have_va_type == error_mark_node)
4966 return GS_ERROR;
4967 have_va_type = targetm.canonical_va_list_type (have_va_type);
4969 if (have_va_type == NULL_TREE)
4971 error ("first argument to %<va_arg%> not of type %<va_list%>");
4972 return GS_ERROR;
4975 /* Generate a diagnostic for requesting data of a type that cannot
4976 be passed through `...' due to type promotion at the call site. */
4977 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4978 != type)
4980 static bool gave_help;
4981 bool warned;
4983 /* Unfortunately, this is merely undefined, rather than a constraint
4984 violation, so we cannot make this an error. If this call is never
4985 executed, the program is still strictly conforming. */
4986 warned = warning (0, "%qT is promoted to %qT when passed through %<...%>",
4987 type, promoted_type);
4988 if (!gave_help && warned)
4990 gave_help = true;
4991 inform (input_location, "(so you should pass %qT not %qT to %<va_arg%>)",
4992 promoted_type, type);
4995 /* We can, however, treat "undefined" any way we please.
4996 Call abort to encourage the user to fix the program. */
4997 if (warned)
4998 inform (input_location, "if this code is reached, the program will abort");
4999 /* Before the abort, allow the evaluation of the va_list
5000 expression to exit or longjmp. */
5001 gimplify_and_add (valist, pre_p);
5002 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
5003 gimplify_and_add (t, pre_p);
5005 /* This is dead code, but go ahead and finish so that the
5006 mode of the result comes out right. */
5007 *expr_p = dummy_object (type);
5008 return GS_ALL_DONE;
5010 else
5012 /* Make it easier for the backends by protecting the valist argument
5013 from multiple evaluations. */
5014 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
5016 /* For this case, the backends will be expecting a pointer to
5017 TREE_TYPE (abi), but it's possible we've
5018 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5019 So fix it. */
5020 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5022 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
5023 valist = build_fold_addr_expr_with_type (valist, p1);
5026 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
5028 else
5029 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
5031 if (!targetm.gimplify_va_arg_expr)
5032 /* FIXME: Once most targets are converted we should merely
5033 assert this is non-null. */
5034 return GS_ALL_DONE;
5036 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5037 return GS_OK;
5041 /* Expand EXP, a call to __builtin_va_end. */
5043 static rtx
5044 expand_builtin_va_end (tree exp)
5046 tree valist = CALL_EXPR_ARG (exp, 0);
5048 /* Evaluate for side effects, if needed. I hate macros that don't
5049 do that. */
5050 if (TREE_SIDE_EFFECTS (valist))
5051 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5053 return const0_rtx;
5056 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5057 builtin rather than just as an assignment in stdarg.h because of the
5058 nastiness of array-type va_list types. */
5060 static rtx
5061 expand_builtin_va_copy (tree exp)
5063 tree dst, src, t;
5065 dst = CALL_EXPR_ARG (exp, 0);
5066 src = CALL_EXPR_ARG (exp, 1);
5068 dst = stabilize_va_list (dst, 1);
5069 src = stabilize_va_list (src, 0);
5071 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5073 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5075 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5076 TREE_SIDE_EFFECTS (t) = 1;
5077 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5079 else
5081 rtx dstb, srcb, size;
5083 /* Evaluate to pointers. */
5084 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5085 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5086 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5087 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5089 dstb = convert_memory_address (Pmode, dstb);
5090 srcb = convert_memory_address (Pmode, srcb);
5092 /* "Dereference" to BLKmode memories. */
5093 dstb = gen_rtx_MEM (BLKmode, dstb);
5094 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5095 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5096 srcb = gen_rtx_MEM (BLKmode, srcb);
5097 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5098 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5100 /* Copy. */
5101 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5104 return const0_rtx;
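/* Stand-alone sketch of why the block copy above is needed: on targets whose
   ABI va_list is an array type, plain assignment of one va_list to another is
   not even valid C, so va_copy must copy the underlying object bytewise.  The
   layout below is hypothetical, purely for illustration.  */

#include <string.h>

struct va_tag_sketch { void *next_arg; int gp_used; int fp_used; };
typedef struct va_tag_sketch va_list_sketch[1];

static void
va_copy_sketch (va_list_sketch dst, const va_list_sketch src)
{
  memcpy (dst, src, sizeof (va_list_sketch));  /* "dst = src" would not compile */
}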
5107 /* Expand a call to one of the builtin functions __builtin_frame_address or
5108 __builtin_return_address. */
5110 static rtx
5111 expand_builtin_frame_address (tree fndecl, tree exp)
5113 /* The argument must be a nonnegative integer constant.
5114 It counts the number of frames to scan up the stack.
5115 The value is the return address saved in that frame. */
5116 if (call_expr_nargs (exp) == 0)
5117 /* Warning about missing arg was already issued. */
5118 return const0_rtx;
5119 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5121 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5122 error ("invalid argument to %<__builtin_frame_address%>");
5123 else
5124 error ("invalid argument to %<__builtin_return_address%>");
5125 return const0_rtx;
5127 else
5129 rtx tem
5130 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5131 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5133 /* Some ports cannot access arbitrary stack frames. */
5134 if (tem == NULL)
5136 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5137 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5138 else
5139 warning (0, "unsupported argument to %<__builtin_return_address%>");
5140 return const0_rtx;
5143 /* For __builtin_frame_address, return what we've got. */
5144 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5145 return tem;
5147 if (!REG_P (tem)
5148 && ! CONSTANT_P (tem))
5149 tem = copy_to_mode_reg (Pmode, tem);
5150 return tem;
5154 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5155 we failed and the caller should emit a normal call, otherwise try to get
5156 the result in TARGET, if convenient. */
5158 static rtx
5159 expand_builtin_alloca (tree exp, rtx target)
5161 rtx op0;
5162 rtx result;
5164 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5165 should always expand to function calls. These can be intercepted
5166 in libmudflap. */
5167 if (flag_mudflap)
5168 return NULL_RTX;
5170 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5171 return NULL_RTX;
5173 /* Compute the argument. */
5174 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5176 /* Allocate the desired space. */
5177 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5178 result = convert_memory_address (ptr_mode, result);
5180 return result;
5183 /* Expand a call to a bswap builtin in EXP. Return NULL_RTX if a normal call
5184 should be emitted; otherwise place the result in TARGET, using SUBTARGET to compute the operand. */
5186 static rtx
5187 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5189 enum machine_mode mode;
5190 tree arg;
5191 rtx op0;
5193 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5194 return NULL_RTX;
5196 arg = CALL_EXPR_ARG (exp, 0);
5197 mode = TYPE_MODE (TREE_TYPE (arg));
5198 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5200 target = expand_unop (mode, bswap_optab, op0, target, 1);
5202 gcc_assert (target);
5204 return convert_to_mode (mode, target, 0);
5207 /* Expand a call to a unary builtin in EXP.
5208 Return NULL_RTX if a normal call should be emitted rather than expanding the
5209 function in-line. If convenient, the result should be placed in TARGET.
5210 SUBTARGET may be used as the target for computing one of EXP's operands. */
5212 static rtx
5213 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5214 rtx subtarget, optab op_optab)
5216 rtx op0;
5218 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5219 return NULL_RTX;
5221 /* Compute the argument. */
5222 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5223 VOIDmode, EXPAND_NORMAL);
5224 /* Compute op, into TARGET if possible.
5225 Set TARGET to wherever the result comes back. */
5226 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5227 op_optab, op0, target, 1);
5228 gcc_assert (target);
5230 return convert_to_mode (target_mode, target, 0);
5233 /* If the string passed to fputs is a constant and is one character
5234 long, we attempt to transform this call into __builtin_fputc(). */
5236 static rtx
5237 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5239 /* Verify the arguments in the original call. */
5240 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5242 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5243 CALL_EXPR_ARG (exp, 1),
5244 (target == const0_rtx),
5245 unlocked, NULL_TREE);
5246 if (result)
5247 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5249 return NULL_RTX;
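/* Stand-alone sketch (hosted C library) of the transformation named above:
   when the string argument is a one-character constant and the result is
   unused, fputs collapses to a single fputc.  */

#include <stdio.h>

static void
fputs_one_char_sketch (FILE *fp)
{
  /* fputs ("x", fp) with the result ignored writes the same byte as: */
  fputc ('x', fp);
}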
5252 /* Expand a call to __builtin_expect. We just return our argument
5253 because the builtin_expect semantics should already have been handled
5254 by the tree branch prediction pass. */
5256 static rtx
5257 expand_builtin_expect (tree exp, rtx target)
5259 tree arg, c;
5261 if (call_expr_nargs (exp) < 2)
5262 return const0_rtx;
5263 arg = CALL_EXPR_ARG (exp, 0);
5264 c = CALL_EXPR_ARG (exp, 1);
5266 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5267 /* When guessing was done, the hints should already have been stripped away. */
5268 gcc_assert (!flag_guess_branch_prob
5269 || optimize == 0 || errorcount || sorrycount);
5270 return target;
5273 void
5274 expand_builtin_trap (void)
5276 #ifdef HAVE_trap
5277 if (HAVE_trap)
5278 emit_insn (gen_trap ());
5279 else
5280 #endif
5281 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5282 emit_barrier ();
5285 /* Expand EXP, a call to fabs, fabsf or fabsl.
5286 Return NULL_RTX if a normal call should be emitted rather than expanding
5287 the function inline. If convenient, the result should be placed
5288 in TARGET. SUBTARGET may be used as the target for computing
5289 the operand. */
5291 static rtx
5292 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5294 enum machine_mode mode;
5295 tree arg;
5296 rtx op0;
5298 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5299 return NULL_RTX;
5301 arg = CALL_EXPR_ARG (exp, 0);
5302 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5303 mode = TYPE_MODE (TREE_TYPE (arg));
5304 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5305 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5308 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5309 Return NULL_RTX if a normal call should be emitted rather than expanding the
5310 function inline. If convenient, the result should be placed in TARGET.
5311 SUBTARGET may be used as the target for computing the operand. */
5313 static rtx
5314 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5316 rtx op0, op1;
5317 tree arg;
5319 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5320 return NULL_RTX;
5322 arg = CALL_EXPR_ARG (exp, 0);
5323 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5325 arg = CALL_EXPR_ARG (exp, 1);
5326 op1 = expand_normal (arg);
5328 return expand_copysign (op0, op1, target);
5331 /* Create a new constant string literal and return a char* pointer to it.
5332 The STRING_CST value is the LEN characters at STR. */
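/* For illustration: callers such as the printf expansion below pass LEN
   including the terminating NUL, e.g. build_string_literal (4, "abc") for
   the C literal "abc"; the result is an ADDR_EXPR pointing at element
   zero of the new STRING_CST.  */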
5333 tree
5334 build_string_literal (int len, const char *str)
5336 tree t, elem, index, type;
5338 t = build_string (len, str);
5339 elem = build_type_variant (char_type_node, 1, 0);
5340 index = build_index_type (size_int (len - 1));
5341 type = build_array_type (elem, index);
5342 TREE_TYPE (t) = type;
5343 TREE_CONSTANT (t) = 1;
5344 TREE_READONLY (t) = 1;
5345 TREE_STATIC (t) = 1;
5347 type = build_pointer_type (elem);
5348 t = build1 (ADDR_EXPR, type,
5349 build4 (ARRAY_REF, elem,
5350 t, integer_zero_node, NULL_TREE, NULL_TREE));
5351 return t;
5354 /* Expand EXP, a call to printf or printf_unlocked.
5355 Return NULL_RTX if a normal call should be emitted rather than transforming
5356 the function inline. If convenient, the result should be placed in
5357 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5358 call. */
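/* For illustration, the transformations performed below (only when the
   return value of the printf call is ignored) include:
     printf ("%s\n", s)  ->  puts (s)
     printf ("%c", c)    ->  putchar (c)
     printf ("x")        ->  putchar ('x')
     printf ("abc\n")    ->  puts ("abc")
     printf ("")         ->  no code at all  */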
5359 static rtx
5360 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5361 bool unlocked)
5363 /* If we're using an unlocked function, assume the other unlocked
5364 functions exist explicitly. */
5365 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5366 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5367 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5368 : implicit_built_in_decls[BUILT_IN_PUTS];
5369 const char *fmt_str;
5370 tree fn = 0;
5371 tree fmt, arg;
5372 int nargs = call_expr_nargs (exp);
5374 /* If the return value is used, don't do the transformation. */
5375 if (target != const0_rtx)
5376 return NULL_RTX;
5378 /* Verify the required arguments in the original call. */
5379 if (nargs == 0)
5380 return NULL_RTX;
5381 fmt = CALL_EXPR_ARG (exp, 0);
5382 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5383 return NULL_RTX;
5385 /* Check whether the format is a literal string constant. */
5386 fmt_str = c_getstr (fmt);
5387 if (fmt_str == NULL)
5388 return NULL_RTX;
5390 if (!init_target_chars ())
5391 return NULL_RTX;
5393 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5394 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5396 if ((nargs != 2)
5397 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5398 return NULL_RTX;
5399 if (fn_puts)
5400 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5402 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5403 else if (strcmp (fmt_str, target_percent_c) == 0)
5405 if ((nargs != 2)
5406 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5407 return NULL_RTX;
5408 if (fn_putchar)
5409 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5411 else
5413 /* We can't handle anything else with % args or %% ... yet. */
5414 if (strchr (fmt_str, target_percent))
5415 return NULL_RTX;
5417 if (nargs > 1)
5418 return NULL_RTX;
5420 /* If the format specifier was "", printf does nothing. */
5421 if (fmt_str[0] == '\0')
5422 return const0_rtx;
5423 /* If the format specifier has length of 1, call putchar. */
5424 if (fmt_str[1] == '\0')
5426 /* Given printf("c"), where c is any single character,
5427 convert "c"[0] to an int and pass that to the replacement
5428 function. */
5429 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5430 if (fn_putchar)
5431 fn = build_call_expr (fn_putchar, 1, arg);
5433 else
5435 /* If the format specifier was "string\n", call puts("string"). */
5436 size_t len = strlen (fmt_str);
5437 if ((unsigned char)fmt_str[len - 1] == target_newline)
5439 /* Create a NUL-terminated string that's one char shorter
5440 than the original, stripping off the trailing '\n'. */
5441 char *newstr = XALLOCAVEC (char, len);
5442 memcpy (newstr, fmt_str, len - 1);
5443 newstr[len - 1] = 0;
5444 arg = build_string_literal (len, newstr);
5445 if (fn_puts)
5446 fn = build_call_expr (fn_puts, 1, arg);
5448 else
5449 /* We'd like to arrange to call fputs(string,stdout) here,
5450 but we need stdout and don't have a way to get it yet. */
5451 return NULL_RTX;
5455 if (!fn)
5456 return NULL_RTX;
5457 if (TREE_CODE (fn) == CALL_EXPR)
5458 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5459 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5462 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5463 Return NULL_RTX if a normal call should be emitted rather than transforming
5464 the function inline. If convenient, the result should be placed in
5465 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5466 call. */
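/* For illustration, the transformations performed below (only when the
   return value of the fprintf call is ignored) include:
     fprintf (fp, "%s", s)  ->  fputs (s, fp)
     fprintf (fp, "%c", c)  ->  fputc (c, fp)
     fprintf (fp, "abc")    ->  fputs ("abc", fp)
     fprintf (fp, "")       ->  FP evaluated for side effects only  */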
5467 static rtx
5468 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5469 bool unlocked)
5471 /* If we're using an unlocked function, assume the other unlocked
5472 functions exist explicitly. */
5473 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5474 : implicit_built_in_decls[BUILT_IN_FPUTC];
5475 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5476 : implicit_built_in_decls[BUILT_IN_FPUTS];
5477 const char *fmt_str;
5478 tree fn = 0;
5479 tree fmt, fp, arg;
5480 int nargs = call_expr_nargs (exp);
5482 /* If the return value is used, don't do the transformation. */
5483 if (target != const0_rtx)
5484 return NULL_RTX;
5486 /* Verify the required arguments in the original call. */
5487 if (nargs < 2)
5488 return NULL_RTX;
5489 fp = CALL_EXPR_ARG (exp, 0);
5490 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5491 return NULL_RTX;
5492 fmt = CALL_EXPR_ARG (exp, 1);
5493 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5494 return NULL_RTX;
5496 /* Check whether the format is a literal string constant. */
5497 fmt_str = c_getstr (fmt);
5498 if (fmt_str == NULL)
5499 return NULL_RTX;
5501 if (!init_target_chars ())
5502 return NULL_RTX;
5504 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5505 if (strcmp (fmt_str, target_percent_s) == 0)
5507 if ((nargs != 3)
5508 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5509 return NULL_RTX;
5510 arg = CALL_EXPR_ARG (exp, 2);
5511 if (fn_fputs)
5512 fn = build_call_expr (fn_fputs, 2, arg, fp);
5514 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5515 else if (strcmp (fmt_str, target_percent_c) == 0)
5517 if ((nargs != 3)
5518 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5519 return NULL_RTX;
5520 arg = CALL_EXPR_ARG (exp, 2);
5521 if (fn_fputc)
5522 fn = build_call_expr (fn_fputc, 2, arg, fp);
5524 else
5526 /* We can't handle anything else with % args or %% ... yet. */
5527 if (strchr (fmt_str, target_percent))
5528 return NULL_RTX;
5530 if (nargs > 2)
5531 return NULL_RTX;
5533 /* If the format specifier was "", fprintf does nothing. */
5534 if (fmt_str[0] == '\0')
5536 /* Evaluate and ignore FILE* argument for side-effects. */
5537 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5538 return const0_rtx;
5541 /* When "string" doesn't contain %, replace all cases of
5542 fprintf(stream,string) with fputs(string,stream). The fputs
5543 builtin will take care of special cases like length == 1. */
5544 if (fn_fputs)
5545 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5548 if (!fn)
5549 return NULL_RTX;
5550 if (TREE_CODE (fn) == CALL_EXPR)
5551 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5552 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5555 /* Expand a call EXP to sprintf. Return NULL_RTX if
5556 a normal call should be emitted rather than expanding the function
5557 inline. If convenient, the result should be placed in TARGET with
5558 mode MODE. */
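/* For illustration, the transformations performed below include:
     sprintf (dest, "abc")    ->  strcpy (dest, "abc"), result 3
     sprintf (dest, "%s", s)  ->  strcpy (dest, s)
   where in the second case the result, if used, must be a string length
   known at compile time; otherwise a normal call is emitted.  */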
5560 static rtx
5561 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5563 tree dest, fmt;
5564 const char *fmt_str;
5565 int nargs = call_expr_nargs (exp);
5567 /* Verify the required arguments in the original call. */
5568 if (nargs < 2)
5569 return NULL_RTX;
5570 dest = CALL_EXPR_ARG (exp, 0);
5571 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5572 return NULL_RTX;
5573 fmt = CALL_EXPR_ARG (exp, 1);
5574 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5575 return NULL_RTX;
5577 /* Check whether the format is a literal string constant. */
5578 fmt_str = c_getstr (fmt);
5579 if (fmt_str == NULL)
5580 return NULL_RTX;
5582 if (!init_target_chars ())
5583 return NULL_RTX;
5585 /* If the format doesn't contain % args or %%, use strcpy. */
5586 if (strchr (fmt_str, target_percent) == 0)
5588 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5589 tree len_cst;
5591 if ((nargs > 2) || ! fn)
5592 return NULL_RTX;
5593 expand_expr (build_call_expr (fn, 2, dest, fmt),
5594 const0_rtx, VOIDmode, EXPAND_NORMAL);
5595 if (target == const0_rtx)
5596 return const0_rtx;
5597 len_cst = build_int_cst (NULL_TREE, strlen (fmt_str));
5598 return expand_expr (len_cst, target, mode, EXPAND_NORMAL);
5600 /* If the format is "%s", use strcpy if the result isn't used. */
5601 else if (strcmp (fmt_str, target_percent_s) == 0)
5603 tree fn, arg, len;
5604 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5606 if (! fn)
5607 return NULL_RTX;
5608 if (nargs != 3)
5609 return NULL_RTX;
5610 arg = CALL_EXPR_ARG (exp, 2);
5611 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5612 return NULL_RTX;
5614 if (target != const0_rtx)
5616 len = c_strlen (arg, 1);
5617 if (! len || TREE_CODE (len) != INTEGER_CST)
5618 return NULL_RTX;
5620 else
5621 len = NULL_TREE;
5623 expand_expr (build_call_expr (fn, 2, dest, arg),
5624 const0_rtx, VOIDmode, EXPAND_NORMAL);
5626 if (target == const0_rtx)
5627 return const0_rtx;
5628 return expand_expr (len, target, mode, EXPAND_NORMAL);
5631 return NULL_RTX;
5634 /* Expand a call to either the entry or exit function profiler. */
5636 static rtx
5637 expand_builtin_profile_func (bool exitp)
5639 rtx this_rtx, which;
5641 this_rtx = DECL_RTL (current_function_decl);
5642 gcc_assert (MEM_P (this_rtx));
5643 this_rtx = XEXP (this_rtx, 0);
5645 if (exitp)
5646 which = profile_function_exit_libfunc;
5647 else
5648 which = profile_function_entry_libfunc;
5650 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5651 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5653 Pmode);
5655 return const0_rtx;
5658 /* Expand a call to __builtin___clear_cache. */
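/* For illustration: __builtin___clear_cache (beg, end) flushes the
   instruction cache for the region between BEG and END.  Depending on the
   target configuration it expands below to the clear_cache insn, to a
   normal call to the libgcc __clear_cache function, or to nothing.  */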
5660 static rtx
5661 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5663 #ifndef HAVE_clear_cache
5664 #ifdef CLEAR_INSN_CACHE
5665 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5666 does something. Just do the default expansion to a call to
5667 __clear_cache(). */
5668 return NULL_RTX;
5669 #else
5670 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5671 does nothing. There is no need to call it. Do nothing. */
5672 return const0_rtx;
5673 #endif /* CLEAR_INSN_CACHE */
5674 #else
5675 /* We have a "clear_cache" insn, and it will handle everything. */
5676 tree begin, end;
5677 rtx begin_rtx, end_rtx;
5678 enum insn_code icode;
5680 /* We must not expand to a library call. If we did, any
5681 fallback library function in libgcc that might contain a call to
5682 __builtin___clear_cache() would recurse infinitely. */
5683 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5685 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5686 return const0_rtx;
5689 if (HAVE_clear_cache)
5691 icode = CODE_FOR_clear_cache;
5693 begin = CALL_EXPR_ARG (exp, 0);
5694 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5695 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5696 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5697 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5699 end = CALL_EXPR_ARG (exp, 1);
5700 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5701 end_rtx = convert_memory_address (Pmode, end_rtx);
5702 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5703 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5705 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5707 return const0_rtx;
5708 #endif /* HAVE_clear_cache */
5711 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
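/* For illustration: with a TRAMPOLINE_ALIGNMENT of 64 bits (8 bytes) the
   code below computes tramp = (tramp + 7) & -8, rounding the address up
   to the next 8-byte boundary.  */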
5713 static rtx
5714 round_trampoline_addr (rtx tramp)
5716 rtx temp, addend, mask;
5718 /* If we don't need too much alignment, we'll have been guaranteed
5719 proper alignment by get_trampoline_type. */
5720 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5721 return tramp;
5723 /* Round address up to desired boundary. */
5724 temp = gen_reg_rtx (Pmode);
5725 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5726 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5728 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5729 temp, 0, OPTAB_LIB_WIDEN);
5730 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5731 temp, 0, OPTAB_LIB_WIDEN);
5733 return tramp;
5736 static rtx
5737 expand_builtin_init_trampoline (tree exp)
5739 tree t_tramp, t_func, t_chain;
5740 rtx r_tramp, r_func, r_chain;
5741 #ifdef TRAMPOLINE_TEMPLATE
5742 rtx blktramp;
5743 #endif
5745 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5746 POINTER_TYPE, VOID_TYPE))
5747 return NULL_RTX;
5749 t_tramp = CALL_EXPR_ARG (exp, 0);
5750 t_func = CALL_EXPR_ARG (exp, 1);
5751 t_chain = CALL_EXPR_ARG (exp, 2);
5753 r_tramp = expand_normal (t_tramp);
5754 r_func = expand_normal (t_func);
5755 r_chain = expand_normal (t_chain);
5757 /* Generate insns to initialize the trampoline. */
5758 r_tramp = round_trampoline_addr (r_tramp);
5759 #ifdef TRAMPOLINE_TEMPLATE
5760 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5761 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5762 emit_block_move (blktramp, assemble_trampoline_template (),
5763 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5764 #endif
5765 trampolines_created = 1;
5766 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5768 return const0_rtx;
5771 static rtx
5772 expand_builtin_adjust_trampoline (tree exp)
5774 rtx tramp;
5776 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5777 return NULL_RTX;
5779 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5780 tramp = round_trampoline_addr (tramp);
5781 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5782 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5783 #endif
5785 return tramp;
5788 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5789 function. The function first checks whether the back end provides
5790 an insn to implement signbit for the respective mode. If not, it
5791 checks whether the floating point format of the value is such that
5792 the sign bit can be extracted. If that is not the case, the
5793 function returns NULL_RTX to indicate that a normal call should be
5794 emitted rather than expanding the function in-line. EXP is the
5795 expression that is a call to the builtin function; if convenient,
5796 the result should be placed in TARGET. */
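/* For illustration: if the sign bit sits at bit position 31 of the
   argument viewed as an integer and the result mode is at least 32 bits
   wide, the generic code below masks that single bit with an AND; if the
   bit position does not fit in the result mode, it instead emits a
   logical right shift by the bit position followed by an AND with 1.  */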
5797 static rtx
5798 expand_builtin_signbit (tree exp, rtx target)
5800 const struct real_format *fmt;
5801 enum machine_mode fmode, imode, rmode;
5802 HOST_WIDE_INT hi, lo;
5803 tree arg;
5804 int word, bitpos;
5805 enum insn_code icode;
5806 rtx temp;
5808 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5809 return NULL_RTX;
5811 arg = CALL_EXPR_ARG (exp, 0);
5812 fmode = TYPE_MODE (TREE_TYPE (arg));
5813 rmode = TYPE_MODE (TREE_TYPE (exp));
5814 fmt = REAL_MODE_FORMAT (fmode);
5816 arg = builtin_save_expr (arg);
5818 /* Expand the argument yielding a RTX expression. */
5819 temp = expand_normal (arg);
5821 /* Check if the back end provides an insn that handles signbit for the
5822 argument's mode. */
5823 icode = signbit_optab->handlers [(int) fmode].insn_code;
5824 if (icode != CODE_FOR_nothing)
5826 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5827 emit_unop_insn (icode, target, temp, UNKNOWN);
5828 return target;
5831 /* For floating point formats without a sign bit, implement signbit
5832 as "ARG < 0.0". */
5833 bitpos = fmt->signbit_ro;
5834 if (bitpos < 0)
5836 /* But we can't do this if the format supports signed zero. */
5837 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5838 return NULL_RTX;
5840 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5841 build_real (TREE_TYPE (arg), dconst0));
5842 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5845 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5847 imode = int_mode_for_mode (fmode);
5848 if (imode == BLKmode)
5849 return NULL_RTX;
5850 temp = gen_lowpart (imode, temp);
5852 else
5854 imode = word_mode;
5855 /* Handle targets with different FP word orders. */
5856 if (FLOAT_WORDS_BIG_ENDIAN)
5857 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5858 else
5859 word = bitpos / BITS_PER_WORD;
5860 temp = operand_subword_force (temp, word, fmode);
5861 bitpos = bitpos % BITS_PER_WORD;
5864 /* Force the intermediate word_mode (or narrower) result into a
5865 register. This avoids attempting to create paradoxical SUBREGs
5866 of floating point modes below. */
5867 temp = force_reg (imode, temp);
5869 /* If the bitpos is within the "result mode" lowpart, the operation
5870 can be implemented with a single bitwise AND. Otherwise, we need
5871 a right shift and an AND. */
5873 if (bitpos < GET_MODE_BITSIZE (rmode))
5875 if (bitpos < HOST_BITS_PER_WIDE_INT)
5877 hi = 0;
5878 lo = (HOST_WIDE_INT) 1 << bitpos;
5880 else
5882 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5883 lo = 0;
5886 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5887 temp = gen_lowpart (rmode, temp);
5888 temp = expand_binop (rmode, and_optab, temp,
5889 immed_double_const (lo, hi, rmode),
5890 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5892 else
5894 /* Perform a logical right shift to place the signbit in the least
5895 significant bit, then truncate the result to the desired mode
5896 and mask just this bit. */
5897 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5898 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5899 temp = gen_lowpart (rmode, temp);
5900 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5901 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5904 return temp;
5907 /* Expand fork or exec calls. TARGET is the desired target of the
5908 call. EXP is the call. FN is the declaration of the
5909 function actually being called. IGNORE is nonzero if the
5910 value is to be ignored. */
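/* For illustration: when compiling with -fprofile-arcs, a call to fork ()
   is rewritten below into a call to __gcov_fork (), and execl (...) into
   __gcov_execl (...), so that the profiling runtime can do whatever
   bookkeeping it needs around these calls.  */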
5912 static rtx
5913 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5915 tree id, decl;
5916 tree call;
5918 /* If we are not profiling, just call the function. */
5919 if (!profile_arc_flag)
5920 return NULL_RTX;
5922 /* Otherwise call the wrapper. This should be equivalent as far as the rest
5923 of the compiler is concerned, so the generated code does not diverge, and the
5924 wrapper may run whatever code is necessary to keep the profiling sane. */
5926 switch (DECL_FUNCTION_CODE (fn))
5928 case BUILT_IN_FORK:
5929 id = get_identifier ("__gcov_fork");
5930 break;
5932 case BUILT_IN_EXECL:
5933 id = get_identifier ("__gcov_execl");
5934 break;
5936 case BUILT_IN_EXECV:
5937 id = get_identifier ("__gcov_execv");
5938 break;
5940 case BUILT_IN_EXECLP:
5941 id = get_identifier ("__gcov_execlp");
5942 break;
5944 case BUILT_IN_EXECLE:
5945 id = get_identifier ("__gcov_execle");
5946 break;
5948 case BUILT_IN_EXECVP:
5949 id = get_identifier ("__gcov_execvp");
5950 break;
5952 case BUILT_IN_EXECVE:
5953 id = get_identifier ("__gcov_execve");
5954 break;
5956 default:
5957 gcc_unreachable ();
5960 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5961 DECL_EXTERNAL (decl) = 1;
5962 TREE_PUBLIC (decl) = 1;
5963 DECL_ARTIFICIAL (decl) = 1;
5964 TREE_NOTHROW (decl) = 1;
5965 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5966 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5967 call = rewrite_call_expr (exp, 0, decl, 0);
5968 return expand_call (call, target, ignore);
5973 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5974 the pointer in these functions is void*, the tree optimizers may remove
5975 casts. The mode computed in expand_builtin isn't reliable either, due
5976 to __sync_bool_compare_and_swap.
5978 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5979 group of builtins. This gives us log2 of the mode size. */
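/* For illustration: for __sync_fetch_and_add_4 the difference from the
   corresponding _1 builtin is 2, so (assuming 8-bit units) the mode
   requested below is the 8 << 2 = 32-bit integer mode.  */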
5981 static inline enum machine_mode
5982 get_builtin_sync_mode (int fcode_diff)
5984 /* The size is not negotiable, so ask not to get BLKmode in return
5985 if the target indicates that a smaller size would be better. */
5986 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5989 /* Expand the memory expression LOC and return the appropriate memory operand
5990 for the builtin_sync operations. */
5992 static rtx
5993 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5995 rtx addr, mem;
5997 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5999 /* Note that we explicitly do not want any alias information for this
6000 memory, so that we kill all other live memories. Otherwise we don't
6001 satisfy the full barrier semantics of the intrinsic. */
6002 mem = validize_mem (gen_rtx_MEM (mode, addr));
6004 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6005 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6006 MEM_VOLATILE_P (mem) = 1;
6008 return mem;
6011 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6012 EXP is the CALL_EXPR. CODE is the rtx code
6013 that corresponds to the arithmetic or logical operation from the name;
6014 an exception here is that NOT actually means NAND. TARGET is an optional
6015 place for us to store the results; AFTER is true for the xxx_and_fetch
6016 forms, false for fetch_and_xxx. IGNORE is true if we don't actually care about
6017 the result of the operation at all. */
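/* For illustration: __sync_fetch_and_add (p, v) reaches this point with
   CODE == PLUS and AFTER == false (the old value is returned), while
   __sync_add_and_fetch (p, v) uses AFTER == true; for the NAND variants
   CODE is NOT but the memory operation actually performed is a NAND.  */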
6019 static rtx
6020 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6021 enum rtx_code code, bool after,
6022 rtx target, bool ignore)
6024 rtx val, mem;
6025 enum machine_mode old_mode;
6027 if (code == NOT && warn_sync_nand)
6029 tree fndecl = get_callee_fndecl (exp);
6030 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6032 static bool warned_f_a_n, warned_n_a_f;
6034 switch (fcode)
6036 case BUILT_IN_FETCH_AND_NAND_1:
6037 case BUILT_IN_FETCH_AND_NAND_2:
6038 case BUILT_IN_FETCH_AND_NAND_4:
6039 case BUILT_IN_FETCH_AND_NAND_8:
6040 case BUILT_IN_FETCH_AND_NAND_16:
6042 if (warned_f_a_n)
6043 break;
6045 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6046 inform (input_location,
6047 "%qD changed semantics in GCC 4.4", fndecl);
6048 warned_f_a_n = true;
6049 break;
6051 case BUILT_IN_NAND_AND_FETCH_1:
6052 case BUILT_IN_NAND_AND_FETCH_2:
6053 case BUILT_IN_NAND_AND_FETCH_4:
6054 case BUILT_IN_NAND_AND_FETCH_8:
6055 case BUILT_IN_NAND_AND_FETCH_16:
6057 if (warned_n_a_f)
6058 break;
6060 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6061 inform (input_location,
6062 "%qD changed semantics in GCC 4.4", fndecl);
6063 warned_n_a_f = true;
6064 break;
6066 default:
6067 gcc_unreachable ();
6071 /* Expand the operands. */
6072 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6074 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6075 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6076 of CONST_INTs, where we know the old_mode only from the call argument. */
6077 old_mode = GET_MODE (val);
6078 if (old_mode == VOIDmode)
6079 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6080 val = convert_modes (mode, old_mode, val, 1);
6082 if (ignore)
6083 return expand_sync_operation (mem, val, code);
6084 else
6085 return expand_sync_fetch_operation (mem, val, code, after, target);
6088 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6089 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6090 true if this is the boolean form. TARGET is a place for us to store the
6091 results; this is NOT optional if IS_BOOL is true. */
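/* For illustration: __sync_bool_compare_and_swap (p, o, n) is the IS_BOOL
   form and produces a truth value saying whether the store took place,
   while __sync_val_compare_and_swap (p, o, n) produces the value that was
   in *p before the operation.  */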
6093 static rtx
6094 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6095 bool is_bool, rtx target)
6097 rtx old_val, new_val, mem;
6098 enum machine_mode old_mode;
6100 /* Expand the operands. */
6101 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6104 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6105 mode, EXPAND_NORMAL);
6106 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6107 of CONST_INTs, where we know the old_mode only from the call argument. */
6108 old_mode = GET_MODE (old_val);
6109 if (old_mode == VOIDmode)
6110 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6111 old_val = convert_modes (mode, old_mode, old_val, 1);
6113 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6114 mode, EXPAND_NORMAL);
6115 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6116 of CONST_INTs, where we know the old_mode only from the call argument. */
6117 old_mode = GET_MODE (new_val);
6118 if (old_mode == VOIDmode)
6119 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6120 new_val = convert_modes (mode, old_mode, new_val, 1);
6122 if (is_bool)
6123 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6124 else
6125 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6128 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6129 general form is actually an atomic exchange, and some targets only
6130 support a reduced form with the second argument being a constant 1.
6131 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6132 the results. */
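/* For illustration: __sync_lock_test_and_set (p, 1) atomically stores 1
   into *p and returns the previous contents; as noted above, some targets
   only support this constant-1 (pure test-and-set) form.  */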
6134 static rtx
6135 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6136 rtx target)
6138 rtx val, mem;
6139 enum machine_mode old_mode;
6141 /* Expand the operands. */
6142 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6143 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6144 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6145 of CONST_INTs, where we know the old_mode only from the call argument. */
6146 old_mode = GET_MODE (val);
6147 if (old_mode == VOIDmode)
6148 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6149 val = convert_modes (mode, old_mode, val, 1);
6151 return expand_sync_lock_test_and_set (mem, val, target);
6154 /* Expand the __sync_synchronize intrinsic. */
6156 static void
6157 expand_builtin_synchronize (void)
6159 tree x;
6161 #ifdef HAVE_memory_barrier
6162 if (HAVE_memory_barrier)
6164 emit_insn (gen_memory_barrier ());
6165 return;
6167 #endif
6169 if (synchronize_libfunc != NULL_RTX)
6171 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6172 return;
6175 /* If no explicit memory barrier instruction is available, create an
6176 empty asm stmt with a memory clobber. */
6177 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6178 tree_cons (NULL, build_string (6, "memory"), NULL));
6179 ASM_VOLATILE_P (x) = 1;
6180 expand_asm_expr (x);
6183 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
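/* For illustration: __sync_lock_release (p) stores 0 into *p; when the
   target provides no sync_lock_release pattern this is emitted below as a
   __sync_synchronize barrier followed by an ordinary store of zero.  */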
6185 static void
6186 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6188 enum insn_code icode;
6189 rtx mem, insn;
6190 rtx val = const0_rtx;
6192 /* Expand the operands. */
6193 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6195 /* If there is an explicit operation in the md file, use it. */
6196 icode = sync_lock_release[mode];
6197 if (icode != CODE_FOR_nothing)
6199 if (!insn_data[icode].operand[1].predicate (val, mode))
6200 val = force_reg (mode, val);
6202 insn = GEN_FCN (icode) (mem, val);
6203 if (insn)
6205 emit_insn (insn);
6206 return;
6210 /* Otherwise we can implement this operation by emitting a barrier
6211 followed by a store of zero. */
6212 expand_builtin_synchronize ();
6213 emit_move_insn (mem, val);
6216 /* Expand an expression EXP that calls a built-in function,
6217 with result going to TARGET if that's convenient
6218 (and in mode MODE if that's convenient).
6219 SUBTARGET may be used as the target for computing one of EXP's operands.
6220 IGNORE is nonzero if the value is to be ignored. */
6223 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6224 int ignore)
6226 tree fndecl = get_callee_fndecl (exp);
6227 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6228 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6230 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6231 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6233 /* When not optimizing, generate calls to library functions for a certain
6234 set of builtins. */
6235 if (!optimize
6236 && !called_as_built_in (fndecl)
6237 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6238 && fcode != BUILT_IN_ALLOCA
6239 && fcode != BUILT_IN_FREE)
6240 return expand_call (exp, target, ignore);
6242 /* The built-in function expanders test for target == const0_rtx
6243 to determine whether the function's result will be ignored. */
6244 if (ignore)
6245 target = const0_rtx;
6247 /* If the result of a pure or const built-in function is ignored, and
6248 none of its arguments are volatile, we can avoid expanding the
6249 built-in call and just evaluate the arguments for side-effects. */
6250 if (target == const0_rtx
6251 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6253 bool volatilep = false;
6254 tree arg;
6255 call_expr_arg_iterator iter;
6257 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6258 if (TREE_THIS_VOLATILE (arg))
6260 volatilep = true;
6261 break;
6264 if (! volatilep)
6266 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6267 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6268 return const0_rtx;
6272 switch (fcode)
6274 CASE_FLT_FN (BUILT_IN_FABS):
6275 target = expand_builtin_fabs (exp, target, subtarget);
6276 if (target)
6277 return target;
6278 break;
6280 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6281 target = expand_builtin_copysign (exp, target, subtarget);
6282 if (target)
6283 return target;
6284 break;
6286 /* Just do a normal library call if we were unable to fold
6287 the values. */
6288 CASE_FLT_FN (BUILT_IN_CABS):
6289 break;
6291 CASE_FLT_FN (BUILT_IN_EXP):
6292 CASE_FLT_FN (BUILT_IN_EXP10):
6293 CASE_FLT_FN (BUILT_IN_POW10):
6294 CASE_FLT_FN (BUILT_IN_EXP2):
6295 CASE_FLT_FN (BUILT_IN_EXPM1):
6296 CASE_FLT_FN (BUILT_IN_LOGB):
6297 CASE_FLT_FN (BUILT_IN_LOG):
6298 CASE_FLT_FN (BUILT_IN_LOG10):
6299 CASE_FLT_FN (BUILT_IN_LOG2):
6300 CASE_FLT_FN (BUILT_IN_LOG1P):
6301 CASE_FLT_FN (BUILT_IN_TAN):
6302 CASE_FLT_FN (BUILT_IN_ASIN):
6303 CASE_FLT_FN (BUILT_IN_ACOS):
6304 CASE_FLT_FN (BUILT_IN_ATAN):
6305 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6306 because of possible accuracy problems. */
6307 if (! flag_unsafe_math_optimizations)
6308 break;
6309 CASE_FLT_FN (BUILT_IN_SQRT):
6310 CASE_FLT_FN (BUILT_IN_FLOOR):
6311 CASE_FLT_FN (BUILT_IN_CEIL):
6312 CASE_FLT_FN (BUILT_IN_TRUNC):
6313 CASE_FLT_FN (BUILT_IN_ROUND):
6314 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6315 CASE_FLT_FN (BUILT_IN_RINT):
6316 target = expand_builtin_mathfn (exp, target, subtarget);
6317 if (target)
6318 return target;
6319 break;
6321 CASE_FLT_FN (BUILT_IN_ILOGB):
6322 if (! flag_unsafe_math_optimizations)
6323 break;
6324 CASE_FLT_FN (BUILT_IN_ISINF):
6325 CASE_FLT_FN (BUILT_IN_FINITE):
6326 case BUILT_IN_ISFINITE:
6327 case BUILT_IN_ISNORMAL:
6328 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6329 if (target)
6330 return target;
6331 break;
6333 CASE_FLT_FN (BUILT_IN_LCEIL):
6334 CASE_FLT_FN (BUILT_IN_LLCEIL):
6335 CASE_FLT_FN (BUILT_IN_LFLOOR):
6336 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6337 target = expand_builtin_int_roundingfn (exp, target);
6338 if (target)
6339 return target;
6340 break;
6342 CASE_FLT_FN (BUILT_IN_LRINT):
6343 CASE_FLT_FN (BUILT_IN_LLRINT):
6344 CASE_FLT_FN (BUILT_IN_LROUND):
6345 CASE_FLT_FN (BUILT_IN_LLROUND):
6346 target = expand_builtin_int_roundingfn_2 (exp, target);
6347 if (target)
6348 return target;
6349 break;
6351 CASE_FLT_FN (BUILT_IN_POW):
6352 target = expand_builtin_pow (exp, target, subtarget);
6353 if (target)
6354 return target;
6355 break;
6357 CASE_FLT_FN (BUILT_IN_POWI):
6358 target = expand_builtin_powi (exp, target, subtarget);
6359 if (target)
6360 return target;
6361 break;
6363 CASE_FLT_FN (BUILT_IN_ATAN2):
6364 CASE_FLT_FN (BUILT_IN_LDEXP):
6365 CASE_FLT_FN (BUILT_IN_SCALB):
6366 CASE_FLT_FN (BUILT_IN_SCALBN):
6367 CASE_FLT_FN (BUILT_IN_SCALBLN):
6368 if (! flag_unsafe_math_optimizations)
6369 break;
6371 CASE_FLT_FN (BUILT_IN_FMOD):
6372 CASE_FLT_FN (BUILT_IN_REMAINDER):
6373 CASE_FLT_FN (BUILT_IN_DREM):
6374 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6375 if (target)
6376 return target;
6377 break;
6379 CASE_FLT_FN (BUILT_IN_CEXPI):
6380 target = expand_builtin_cexpi (exp, target, subtarget);
6381 gcc_assert (target);
6382 return target;
6384 CASE_FLT_FN (BUILT_IN_SIN):
6385 CASE_FLT_FN (BUILT_IN_COS):
6386 if (! flag_unsafe_math_optimizations)
6387 break;
6388 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6389 if (target)
6390 return target;
6391 break;
6393 CASE_FLT_FN (BUILT_IN_SINCOS):
6394 if (! flag_unsafe_math_optimizations)
6395 break;
6396 target = expand_builtin_sincos (exp);
6397 if (target)
6398 return target;
6399 break;
6401 case BUILT_IN_APPLY_ARGS:
6402 return expand_builtin_apply_args ();
6404 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6405 FUNCTION with a copy of the parameters described by
6406 ARGUMENTS, and ARGSIZE. It returns a block of memory
6407 allocated on the stack into which is stored all the registers
6408 that might possibly be used for returning the result of a
6409 function. ARGUMENTS is the value returned by
6410 __builtin_apply_args. ARGSIZE is the number of bytes of
6411 arguments that must be copied. ??? How should this value be
6412 computed? We'll also need a safe worst case value for varargs
6413 functions. */
6414 case BUILT_IN_APPLY:
6415 if (!validate_arglist (exp, POINTER_TYPE,
6416 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6417 && !validate_arglist (exp, REFERENCE_TYPE,
6418 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6419 return const0_rtx;
6420 else
6422 rtx ops[3];
6424 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6425 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6426 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6428 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6431 /* __builtin_return (RESULT) causes the function to return the
6432 value described by RESULT. RESULT is address of the block of
6433 memory returned by __builtin_apply. */
6434 case BUILT_IN_RETURN:
6435 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6436 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6437 return const0_rtx;
6439 case BUILT_IN_SAVEREGS:
6440 return expand_builtin_saveregs ();
6442 case BUILT_IN_ARGS_INFO:
6443 return expand_builtin_args_info (exp);
6445 case BUILT_IN_VA_ARG_PACK:
6446 /* All valid uses of __builtin_va_arg_pack () are removed during
6447 inlining. */
6448 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6449 return const0_rtx;
6451 case BUILT_IN_VA_ARG_PACK_LEN:
6452 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6453 inlining. */
6454 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6455 return const0_rtx;
6457 /* Return the address of the first anonymous stack arg. */
6458 case BUILT_IN_NEXT_ARG:
6459 if (fold_builtin_next_arg (exp, false))
6460 return const0_rtx;
6461 return expand_builtin_next_arg ();
6463 case BUILT_IN_CLEAR_CACHE:
6464 target = expand_builtin___clear_cache (exp);
6465 if (target)
6466 return target;
6467 break;
6469 case BUILT_IN_CLASSIFY_TYPE:
6470 return expand_builtin_classify_type (exp);
6472 case BUILT_IN_CONSTANT_P:
6473 return const0_rtx;
6475 case BUILT_IN_FRAME_ADDRESS:
6476 case BUILT_IN_RETURN_ADDRESS:
6477 return expand_builtin_frame_address (fndecl, exp);
6479 /* Returns the address of the area where the structure is returned.
6480 0 otherwise. */
6481 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6482 if (call_expr_nargs (exp) != 0
6483 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6484 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6485 return const0_rtx;
6486 else
6487 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6489 case BUILT_IN_ALLOCA:
6490 target = expand_builtin_alloca (exp, target);
6491 if (target)
6492 return target;
6493 break;
6495 case BUILT_IN_STACK_SAVE:
6496 return expand_stack_save ();
6498 case BUILT_IN_STACK_RESTORE:
6499 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6500 return const0_rtx;
6502 case BUILT_IN_BSWAP32:
6503 case BUILT_IN_BSWAP64:
6504 target = expand_builtin_bswap (exp, target, subtarget);
6506 if (target)
6507 return target;
6508 break;
6510 CASE_INT_FN (BUILT_IN_FFS):
6511 case BUILT_IN_FFSIMAX:
6512 target = expand_builtin_unop (target_mode, exp, target,
6513 subtarget, ffs_optab);
6514 if (target)
6515 return target;
6516 break;
6518 CASE_INT_FN (BUILT_IN_CLZ):
6519 case BUILT_IN_CLZIMAX:
6520 target = expand_builtin_unop (target_mode, exp, target,
6521 subtarget, clz_optab);
6522 if (target)
6523 return target;
6524 break;
6526 CASE_INT_FN (BUILT_IN_CTZ):
6527 case BUILT_IN_CTZIMAX:
6528 target = expand_builtin_unop (target_mode, exp, target,
6529 subtarget, ctz_optab);
6530 if (target)
6531 return target;
6532 break;
6534 CASE_INT_FN (BUILT_IN_POPCOUNT):
6535 case BUILT_IN_POPCOUNTIMAX:
6536 target = expand_builtin_unop (target_mode, exp, target,
6537 subtarget, popcount_optab);
6538 if (target)
6539 return target;
6540 break;
6542 CASE_INT_FN (BUILT_IN_PARITY):
6543 case BUILT_IN_PARITYIMAX:
6544 target = expand_builtin_unop (target_mode, exp, target,
6545 subtarget, parity_optab);
6546 if (target)
6547 return target;
6548 break;
6550 case BUILT_IN_STRLEN:
6551 target = expand_builtin_strlen (exp, target, target_mode);
6552 if (target)
6553 return target;
6554 break;
6556 case BUILT_IN_STRCPY:
6557 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6558 if (target)
6559 return target;
6560 break;
6562 case BUILT_IN_STRNCPY:
6563 target = expand_builtin_strncpy (exp, target, mode);
6564 if (target)
6565 return target;
6566 break;
6568 case BUILT_IN_STPCPY:
6569 target = expand_builtin_stpcpy (exp, target, mode);
6570 if (target)
6571 return target;
6572 break;
6574 case BUILT_IN_STRCAT:
6575 target = expand_builtin_strcat (fndecl, exp, target, mode);
6576 if (target)
6577 return target;
6578 break;
6580 case BUILT_IN_STRNCAT:
6581 target = expand_builtin_strncat (exp, target, mode);
6582 if (target)
6583 return target;
6584 break;
6586 case BUILT_IN_STRSPN:
6587 target = expand_builtin_strspn (exp, target, mode);
6588 if (target)
6589 return target;
6590 break;
6592 case BUILT_IN_STRCSPN:
6593 target = expand_builtin_strcspn (exp, target, mode);
6594 if (target)
6595 return target;
6596 break;
6598 case BUILT_IN_STRSTR:
6599 target = expand_builtin_strstr (exp, target, mode);
6600 if (target)
6601 return target;
6602 break;
6604 case BUILT_IN_STRPBRK:
6605 target = expand_builtin_strpbrk (exp, target, mode);
6606 if (target)
6607 return target;
6608 break;
6610 case BUILT_IN_INDEX:
6611 case BUILT_IN_STRCHR:
6612 target = expand_builtin_strchr (exp, target, mode);
6613 if (target)
6614 return target;
6615 break;
6617 case BUILT_IN_RINDEX:
6618 case BUILT_IN_STRRCHR:
6619 target = expand_builtin_strrchr (exp, target, mode);
6620 if (target)
6621 return target;
6622 break;
6624 case BUILT_IN_MEMCPY:
6625 target = expand_builtin_memcpy (exp, target, mode);
6626 if (target)
6627 return target;
6628 break;
6630 case BUILT_IN_MEMPCPY:
6631 target = expand_builtin_mempcpy (exp, target, mode);
6632 if (target)
6633 return target;
6634 break;
6636 case BUILT_IN_MEMMOVE:
6637 target = expand_builtin_memmove (exp, target, mode, ignore);
6638 if (target)
6639 return target;
6640 break;
6642 case BUILT_IN_BCOPY:
6643 target = expand_builtin_bcopy (exp, ignore);
6644 if (target)
6645 return target;
6646 break;
6648 case BUILT_IN_MEMSET:
6649 target = expand_builtin_memset (exp, target, mode);
6650 if (target)
6651 return target;
6652 break;
6654 case BUILT_IN_BZERO:
6655 target = expand_builtin_bzero (exp);
6656 if (target)
6657 return target;
6658 break;
6660 case BUILT_IN_STRCMP:
6661 target = expand_builtin_strcmp (exp, target, mode);
6662 if (target)
6663 return target;
6664 break;
6666 case BUILT_IN_STRNCMP:
6667 target = expand_builtin_strncmp (exp, target, mode);
6668 if (target)
6669 return target;
6670 break;
6672 case BUILT_IN_MEMCHR:
6673 target = expand_builtin_memchr (exp, target, mode);
6674 if (target)
6675 return target;
6676 break;
6678 case BUILT_IN_BCMP:
6679 case BUILT_IN_MEMCMP:
6680 target = expand_builtin_memcmp (exp, target, mode);
6681 if (target)
6682 return target;
6683 break;
6685 case BUILT_IN_SETJMP:
6686 /* This should have been lowered to the builtins below. */
6687 gcc_unreachable ();
6689 case BUILT_IN_SETJMP_SETUP:
6690 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6691 and the receiver label. */
6692 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6694 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6695 VOIDmode, EXPAND_NORMAL);
6696 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6697 rtx label_r = label_rtx (label);
6699 /* This is copied from the handling of non-local gotos. */
6700 expand_builtin_setjmp_setup (buf_addr, label_r);
6701 nonlocal_goto_handler_labels
6702 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6703 nonlocal_goto_handler_labels);
6704 /* ??? Do not let expand_label treat us as such since we would
6705 not want to be both on the list of non-local labels and on
6706 the list of forced labels. */
6707 FORCED_LABEL (label) = 0;
6708 return const0_rtx;
6710 break;
6712 case BUILT_IN_SETJMP_DISPATCHER:
6713 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6714 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6716 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6717 rtx label_r = label_rtx (label);
6719 /* Remove the dispatcher label from the list of non-local labels
6720 since the receiver labels have been added to it above. */
6721 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6722 return const0_rtx;
6724 break;
6726 case BUILT_IN_SETJMP_RECEIVER:
6727 /* __builtin_setjmp_receiver is passed the receiver label. */
6728 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6730 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6731 rtx label_r = label_rtx (label);
6733 expand_builtin_setjmp_receiver (label_r);
6734 return const0_rtx;
6736 break;
6738 /* __builtin_longjmp is passed a pointer to an array of five words.
6739 It's similar to the C library longjmp function but works with
6740 __builtin_setjmp above. */
6741 case BUILT_IN_LONGJMP:
6742 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6744 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6745 VOIDmode, EXPAND_NORMAL);
6746 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6748 if (value != const1_rtx)
6750 error ("%<__builtin_longjmp%> second argument must be 1");
6751 return const0_rtx;
6754 expand_builtin_longjmp (buf_addr, value);
6755 return const0_rtx;
6757 break;
6759 case BUILT_IN_NONLOCAL_GOTO:
6760 target = expand_builtin_nonlocal_goto (exp);
6761 if (target)
6762 return target;
6763 break;
6765 /* This updates the setjmp buffer that is its argument with the value
6766 of the current stack pointer. */
6767 case BUILT_IN_UPDATE_SETJMP_BUF:
6768 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6770 rtx buf_addr
6771 = expand_normal (CALL_EXPR_ARG (exp, 0));
6773 expand_builtin_update_setjmp_buf (buf_addr);
6774 return const0_rtx;
6776 break;
6778 case BUILT_IN_TRAP:
6779 expand_builtin_trap ();
6780 return const0_rtx;
6782 case BUILT_IN_PRINTF:
6783 target = expand_builtin_printf (exp, target, mode, false);
6784 if (target)
6785 return target;
6786 break;
6788 case BUILT_IN_PRINTF_UNLOCKED:
6789 target = expand_builtin_printf (exp, target, mode, true);
6790 if (target)
6791 return target;
6792 break;
6794 case BUILT_IN_FPUTS:
6795 target = expand_builtin_fputs (exp, target, false);
6796 if (target)
6797 return target;
6798 break;
6799 case BUILT_IN_FPUTS_UNLOCKED:
6800 target = expand_builtin_fputs (exp, target, true);
6801 if (target)
6802 return target;
6803 break;
6805 case BUILT_IN_FPRINTF:
6806 target = expand_builtin_fprintf (exp, target, mode, false);
6807 if (target)
6808 return target;
6809 break;
6811 case BUILT_IN_FPRINTF_UNLOCKED:
6812 target = expand_builtin_fprintf (exp, target, mode, true);
6813 if (target)
6814 return target;
6815 break;
6817 case BUILT_IN_SPRINTF:
6818 target = expand_builtin_sprintf (exp, target, mode);
6819 if (target)
6820 return target;
6821 break;
6823 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6824 case BUILT_IN_SIGNBITD32:
6825 case BUILT_IN_SIGNBITD64:
6826 case BUILT_IN_SIGNBITD128:
6827 target = expand_builtin_signbit (exp, target);
6828 if (target)
6829 return target;
6830 break;
6832 /* Various hooks for the DWARF 2 __throw routine. */
6833 case BUILT_IN_UNWIND_INIT:
6834 expand_builtin_unwind_init ();
6835 return const0_rtx;
6836 case BUILT_IN_DWARF_CFA:
6837 return virtual_cfa_rtx;
6838 #ifdef DWARF2_UNWIND_INFO
6839 case BUILT_IN_DWARF_SP_COLUMN:
6840 return expand_builtin_dwarf_sp_column ();
6841 case BUILT_IN_INIT_DWARF_REG_SIZES:
6842 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6843 return const0_rtx;
6844 #endif
6845 case BUILT_IN_FROB_RETURN_ADDR:
6846 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6847 case BUILT_IN_EXTRACT_RETURN_ADDR:
6848 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6849 case BUILT_IN_EH_RETURN:
6850 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6851 CALL_EXPR_ARG (exp, 1));
6852 return const0_rtx;
6853 #ifdef EH_RETURN_DATA_REGNO
6854 case BUILT_IN_EH_RETURN_DATA_REGNO:
6855 return expand_builtin_eh_return_data_regno (exp);
6856 #endif
6857 case BUILT_IN_EXTEND_POINTER:
6858 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6860 case BUILT_IN_VA_START:
6861 return expand_builtin_va_start (exp);
6862 case BUILT_IN_VA_END:
6863 return expand_builtin_va_end (exp);
6864 case BUILT_IN_VA_COPY:
6865 return expand_builtin_va_copy (exp);
6866 case BUILT_IN_EXPECT:
6867 return expand_builtin_expect (exp, target);
6868 case BUILT_IN_PREFETCH:
6869 expand_builtin_prefetch (exp);
6870 return const0_rtx;
6872 case BUILT_IN_PROFILE_FUNC_ENTER:
6873 return expand_builtin_profile_func (false);
6874 case BUILT_IN_PROFILE_FUNC_EXIT:
6875 return expand_builtin_profile_func (true);
6877 case BUILT_IN_INIT_TRAMPOLINE:
6878 return expand_builtin_init_trampoline (exp);
6879 case BUILT_IN_ADJUST_TRAMPOLINE:
6880 return expand_builtin_adjust_trampoline (exp);
6882 case BUILT_IN_FORK:
6883 case BUILT_IN_EXECL:
6884 case BUILT_IN_EXECV:
6885 case BUILT_IN_EXECLP:
6886 case BUILT_IN_EXECLE:
6887 case BUILT_IN_EXECVP:
6888 case BUILT_IN_EXECVE:
6889 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6890 if (target)
6891 return target;
6892 break;
6894 case BUILT_IN_FETCH_AND_ADD_1:
6895 case BUILT_IN_FETCH_AND_ADD_2:
6896 case BUILT_IN_FETCH_AND_ADD_4:
6897 case BUILT_IN_FETCH_AND_ADD_8:
6898 case BUILT_IN_FETCH_AND_ADD_16:
6899 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6900 target = expand_builtin_sync_operation (mode, exp, PLUS,
6901 false, target, ignore);
6902 if (target)
6903 return target;
6904 break;
6906 case BUILT_IN_FETCH_AND_SUB_1:
6907 case BUILT_IN_FETCH_AND_SUB_2:
6908 case BUILT_IN_FETCH_AND_SUB_4:
6909 case BUILT_IN_FETCH_AND_SUB_8:
6910 case BUILT_IN_FETCH_AND_SUB_16:
6911 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6912 target = expand_builtin_sync_operation (mode, exp, MINUS,
6913 false, target, ignore);
6914 if (target)
6915 return target;
6916 break;
6918 case BUILT_IN_FETCH_AND_OR_1:
6919 case BUILT_IN_FETCH_AND_OR_2:
6920 case BUILT_IN_FETCH_AND_OR_4:
6921 case BUILT_IN_FETCH_AND_OR_8:
6922 case BUILT_IN_FETCH_AND_OR_16:
6923 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6924 target = expand_builtin_sync_operation (mode, exp, IOR,
6925 false, target, ignore);
6926 if (target)
6927 return target;
6928 break;
6930 case BUILT_IN_FETCH_AND_AND_1:
6931 case BUILT_IN_FETCH_AND_AND_2:
6932 case BUILT_IN_FETCH_AND_AND_4:
6933 case BUILT_IN_FETCH_AND_AND_8:
6934 case BUILT_IN_FETCH_AND_AND_16:
6935 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6936 target = expand_builtin_sync_operation (mode, exp, AND,
6937 false, target, ignore);
6938 if (target)
6939 return target;
6940 break;
6942 case BUILT_IN_FETCH_AND_XOR_1:
6943 case BUILT_IN_FETCH_AND_XOR_2:
6944 case BUILT_IN_FETCH_AND_XOR_4:
6945 case BUILT_IN_FETCH_AND_XOR_8:
6946 case BUILT_IN_FETCH_AND_XOR_16:
6947 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6948 target = expand_builtin_sync_operation (mode, exp, XOR,
6949 false, target, ignore);
6950 if (target)
6951 return target;
6952 break;
6954 case BUILT_IN_FETCH_AND_NAND_1:
6955 case BUILT_IN_FETCH_AND_NAND_2:
6956 case BUILT_IN_FETCH_AND_NAND_4:
6957 case BUILT_IN_FETCH_AND_NAND_8:
6958 case BUILT_IN_FETCH_AND_NAND_16:
6959 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6960 target = expand_builtin_sync_operation (mode, exp, NOT,
6961 false, target, ignore);
6962 if (target)
6963 return target;
6964 break;
6966 case BUILT_IN_ADD_AND_FETCH_1:
6967 case BUILT_IN_ADD_AND_FETCH_2:
6968 case BUILT_IN_ADD_AND_FETCH_4:
6969 case BUILT_IN_ADD_AND_FETCH_8:
6970 case BUILT_IN_ADD_AND_FETCH_16:
6971 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6972 target = expand_builtin_sync_operation (mode, exp, PLUS,
6973 true, target, ignore);
6974 if (target)
6975 return target;
6976 break;
6978 case BUILT_IN_SUB_AND_FETCH_1:
6979 case BUILT_IN_SUB_AND_FETCH_2:
6980 case BUILT_IN_SUB_AND_FETCH_4:
6981 case BUILT_IN_SUB_AND_FETCH_8:
6982 case BUILT_IN_SUB_AND_FETCH_16:
6983 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6984 target = expand_builtin_sync_operation (mode, exp, MINUS,
6985 true, target, ignore);
6986 if (target)
6987 return target;
6988 break;
6990 case BUILT_IN_OR_AND_FETCH_1:
6991 case BUILT_IN_OR_AND_FETCH_2:
6992 case BUILT_IN_OR_AND_FETCH_4:
6993 case BUILT_IN_OR_AND_FETCH_8:
6994 case BUILT_IN_OR_AND_FETCH_16:
6995 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6996 target = expand_builtin_sync_operation (mode, exp, IOR,
6997 true, target, ignore);
6998 if (target)
6999 return target;
7000 break;
7002 case BUILT_IN_AND_AND_FETCH_1:
7003 case BUILT_IN_AND_AND_FETCH_2:
7004 case BUILT_IN_AND_AND_FETCH_4:
7005 case BUILT_IN_AND_AND_FETCH_8:
7006 case BUILT_IN_AND_AND_FETCH_16:
7007 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7008 target = expand_builtin_sync_operation (mode, exp, AND,
7009 true, target, ignore);
7010 if (target)
7011 return target;
7012 break;
7014 case BUILT_IN_XOR_AND_FETCH_1:
7015 case BUILT_IN_XOR_AND_FETCH_2:
7016 case BUILT_IN_XOR_AND_FETCH_4:
7017 case BUILT_IN_XOR_AND_FETCH_8:
7018 case BUILT_IN_XOR_AND_FETCH_16:
7019 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7020 target = expand_builtin_sync_operation (mode, exp, XOR,
7021 true, target, ignore);
7022 if (target)
7023 return target;
7024 break;
7026 case BUILT_IN_NAND_AND_FETCH_1:
7027 case BUILT_IN_NAND_AND_FETCH_2:
7028 case BUILT_IN_NAND_AND_FETCH_4:
7029 case BUILT_IN_NAND_AND_FETCH_8:
7030 case BUILT_IN_NAND_AND_FETCH_16:
7031 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7032 target = expand_builtin_sync_operation (mode, exp, NOT,
7033 true, target, ignore);
7034 if (target)
7035 return target;
7036 break;
7038 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7039 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7040 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7041 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7042 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7043 if (mode == VOIDmode)
7044 mode = TYPE_MODE (boolean_type_node);
7045 if (!target || !register_operand (target, mode))
7046 target = gen_reg_rtx (mode);
7048 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7049 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7050 if (target)
7051 return target;
7052 break;
7054 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7055 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7056 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7057 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7058 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7059 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7060 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7061 if (target)
7062 return target;
7063 break;
7065 case BUILT_IN_LOCK_TEST_AND_SET_1:
7066 case BUILT_IN_LOCK_TEST_AND_SET_2:
7067 case BUILT_IN_LOCK_TEST_AND_SET_4:
7068 case BUILT_IN_LOCK_TEST_AND_SET_8:
7069 case BUILT_IN_LOCK_TEST_AND_SET_16:
7070 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7071 target = expand_builtin_lock_test_and_set (mode, exp, target);
7072 if (target)
7073 return target;
7074 break;
7076 case BUILT_IN_LOCK_RELEASE_1:
7077 case BUILT_IN_LOCK_RELEASE_2:
7078 case BUILT_IN_LOCK_RELEASE_4:
7079 case BUILT_IN_LOCK_RELEASE_8:
7080 case BUILT_IN_LOCK_RELEASE_16:
7081 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7082 expand_builtin_lock_release (mode, exp);
7083 return const0_rtx;
7085 case BUILT_IN_SYNCHRONIZE:
7086 expand_builtin_synchronize ();
7087 return const0_rtx;
7089 case BUILT_IN_OBJECT_SIZE:
7090 return expand_builtin_object_size (exp);
7092 case BUILT_IN_MEMCPY_CHK:
7093 case BUILT_IN_MEMPCPY_CHK:
7094 case BUILT_IN_MEMMOVE_CHK:
7095 case BUILT_IN_MEMSET_CHK:
7096 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7097 if (target)
7098 return target;
7099 break;
7101 case BUILT_IN_STRCPY_CHK:
7102 case BUILT_IN_STPCPY_CHK:
7103 case BUILT_IN_STRNCPY_CHK:
7104 case BUILT_IN_STRCAT_CHK:
7105 case BUILT_IN_STRNCAT_CHK:
7106 case BUILT_IN_SNPRINTF_CHK:
7107 case BUILT_IN_VSNPRINTF_CHK:
7108 maybe_emit_chk_warning (exp, fcode);
7109 break;
7111 case BUILT_IN_SPRINTF_CHK:
7112 case BUILT_IN_VSPRINTF_CHK:
7113 maybe_emit_sprintf_chk_warning (exp, fcode);
7114 break;
7116 case BUILT_IN_FREE:
7117 maybe_emit_free_warning (exp);
7118 break;
7120 default: /* just do library call, if unknown builtin */
7121 break;
7124 /* The switch statement above can drop through to cause the function
7125 to be called normally. */
7126 return expand_call (exp, target, ignore);
7129 /* Determine whether a tree node represents a call to a built-in
7130 function. If the tree T is a call to a built-in function with
7131 the right number of arguments of the appropriate types, return
7132 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7133 Otherwise the return value is END_BUILTINS. */
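/* For illustration: for a call such as sqrt (2.0) using the standard
   declaration of sqrt, this returns BUILT_IN_SQRT; if the argument types
   do not match the builtin's parameter list, END_BUILTINS is returned
   instead.  */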
7135 enum built_in_function
7136 builtin_mathfn_code (const_tree t)
7138 const_tree fndecl, arg, parmlist;
7139 const_tree argtype, parmtype;
7140 const_call_expr_arg_iterator iter;
7142 if (TREE_CODE (t) != CALL_EXPR
7143 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7144 return END_BUILTINS;
7146 fndecl = get_callee_fndecl (t);
7147 if (fndecl == NULL_TREE
7148 || TREE_CODE (fndecl) != FUNCTION_DECL
7149 || ! DECL_BUILT_IN (fndecl)
7150 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7151 return END_BUILTINS;
7153 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7154 init_const_call_expr_arg_iterator (t, &iter);
7155 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7157 /* If a function doesn't take a variable number of arguments,
7158 the last element in the list will have type `void'. */
7159 parmtype = TREE_VALUE (parmlist);
7160 if (VOID_TYPE_P (parmtype))
7162 if (more_const_call_expr_args_p (&iter))
7163 return END_BUILTINS;
7164 return DECL_FUNCTION_CODE (fndecl);
7167 if (! more_const_call_expr_args_p (&iter))
7168 return END_BUILTINS;
7170 arg = next_const_call_expr_arg (&iter);
7171 argtype = TREE_TYPE (arg);
7173 if (SCALAR_FLOAT_TYPE_P (parmtype))
7175 if (! SCALAR_FLOAT_TYPE_P (argtype))
7176 return END_BUILTINS;
7178 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7180 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7181 return END_BUILTINS;
7183 else if (POINTER_TYPE_P (parmtype))
7185 if (! POINTER_TYPE_P (argtype))
7186 return END_BUILTINS;
7188 else if (INTEGRAL_TYPE_P (parmtype))
7190 if (! INTEGRAL_TYPE_P (argtype))
7191 return END_BUILTINS;
7193 else
7194 return END_BUILTINS;
7197 /* Variable-length argument list. */
7198 return DECL_FUNCTION_CODE (fndecl);
7201 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7202 evaluate to a constant. */
7204 static tree
7205 fold_builtin_constant_p (tree arg)
7207 /* We return 1 for a numeric type that's known to be a constant
7208 value at compile-time or for an aggregate type that's a
7209 literal constant. */
7210 STRIP_NOPS (arg);
7212 /* If we know this is a constant, return the constant one. */
7213 if (CONSTANT_CLASS_P (arg)
7214 || (TREE_CODE (arg) == CONSTRUCTOR
7215 && TREE_CONSTANT (arg)))
7216 return integer_one_node;
7217 if (TREE_CODE (arg) == ADDR_EXPR)
7219 tree op = TREE_OPERAND (arg, 0);
7220 if (TREE_CODE (op) == STRING_CST
7221 || (TREE_CODE (op) == ARRAY_REF
7222 && integer_zerop (TREE_OPERAND (op, 1))
7223 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7224 return integer_one_node;
7227 /* If this expression has side effects, show we don't know it to be a
7228 constant. Likewise if it's a pointer or aggregate type since in
7229 those cases we only want literals, since those are only optimized
7230 when generating RTL, not later.
7231 And finally, if we are compiling an initializer, not code, we
7232 need to return a definite result now; there's not going to be any
7233 more optimization done. */
7234 if (TREE_SIDE_EFFECTS (arg)
7235 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7236 || POINTER_TYPE_P (TREE_TYPE (arg))
7237 || cfun == 0
7238 || folding_initializer)
7239 return integer_zero_node;
7241 return NULL_TREE;
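/* Illustrative sketch, not part of GCC and not compiled: the user-level
   effect of fold_builtin_constant_p above.  The standalone functions and
   the name f below are hypothetical.  */
#if 0
extern int f (int);

int
literal_arg (void)
{
  return __builtin_constant_p (42);      /* constant class: folds to 1 */
}

int
string_arg (void)
{
  return __builtin_constant_p ("abc");   /* ADDR_EXPR of a STRING_CST: folds to 1 */
}

int
side_effect_arg (int x)
{
  return __builtin_constant_p (f (x));   /* call has side effects: folds to 0 */
}

int
deferred_arg (int x)
{
  return __builtin_constant_p (x);       /* not decided here; left for later passes */
}
#endif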
7244 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7245 return it as a truthvalue. */
7247 static tree
7248 build_builtin_expect_predicate (tree pred, tree expected)
7250 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7252 fn = built_in_decls[BUILT_IN_EXPECT];
7253 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7254 ret_type = TREE_TYPE (TREE_TYPE (fn));
7255 pred_type = TREE_VALUE (arg_types);
7256 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7258 pred = fold_convert (pred_type, pred);
7259 expected = fold_convert (expected_type, expected);
7260 call_expr = build_call_expr (fn, 2, pred, expected);
7262 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7263 build_int_cst (ret_type, 0));
7266 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7267 NULL_TREE if no simplification is possible. */
7269 static tree
7270 fold_builtin_expect (tree arg0, tree arg1)
7272 tree inner, fndecl;
7273 enum tree_code code;
7275 /* If this is a builtin_expect within a builtin_expect, keep the
7276 inner one. See through a comparison against a constant. It
7277 might have been added to create a truthvalue. */
7278 inner = arg0;
7279 if (COMPARISON_CLASS_P (inner)
7280 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7281 inner = TREE_OPERAND (inner, 0);
7283 if (TREE_CODE (inner) == CALL_EXPR
7284 && (fndecl = get_callee_fndecl (inner))
7285 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7286 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7287 return arg0;
7289 /* Distribute the expected value over short-circuiting operators.
7290 See through the cast from truthvalue_type_node to long. */
7291 inner = arg0;
7292 while (TREE_CODE (inner) == NOP_EXPR
7293 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7294 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7295 inner = TREE_OPERAND (inner, 0);
7297 code = TREE_CODE (inner);
7298 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7300 tree op0 = TREE_OPERAND (inner, 0);
7301 tree op1 = TREE_OPERAND (inner, 1);
7303 op0 = build_builtin_expect_predicate (op0, arg1);
7304 op1 = build_builtin_expect_predicate (op1, arg1);
7305 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7307 return fold_convert (TREE_TYPE (arg0), inner);
7310 /* If the argument isn't invariant then there's nothing else we can do. */
7311 if (!TREE_CONSTANT (arg0))
7312 return NULL_TREE;
7314 /* If we expect that a comparison against the argument will fold to
7315 a constant, return the constant. In practice, this means a true
7316 constant or the address of a non-weak symbol. */
7317 inner = arg0;
7318 STRIP_NOPS (inner);
7319 if (TREE_CODE (inner) == ADDR_EXPR)
7323 inner = TREE_OPERAND (inner, 0);
7325 while (TREE_CODE (inner) == COMPONENT_REF
7326 || TREE_CODE (inner) == ARRAY_REF);
7327 if ((TREE_CODE (inner) == VAR_DECL
7328 || TREE_CODE (inner) == FUNCTION_DECL)
7329 && DECL_WEAK (inner))
7330 return NULL_TREE;
7333 /* Otherwise, ARG0 already has the proper type for the return value. */
7334 return arg0;
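/* Illustrative sketch, not part of GCC and not compiled: how the
   short-circuit distribution above looks at the source level.  The names
   cond1, cond2 and work are hypothetical.  */
#if 0
extern int cond1 (void), cond2 (void);
extern void work (void);

void
caller (void)
{
  /* The fold rewrites the condition, in effect, as
       (__builtin_expect (cond1 (), 1) != 0)
       && (__builtin_expect (cond2 (), 1) != 0)
     so the hint reaches each short-circuited operand.  */
  if (__builtin_expect (cond1 () && cond2 (), 1))
    work ();
}
#endif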
7337 /* Fold a call to __builtin_classify_type with argument ARG. */
7339 static tree
7340 fold_builtin_classify_type (tree arg)
7342 if (arg == 0)
7343 return build_int_cst (NULL_TREE, no_type_class);
7345 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7348 /* Fold a call to __builtin_strlen with argument ARG. */
7350 static tree
7351 fold_builtin_strlen (tree arg)
7353 if (!validate_arg (arg, POINTER_TYPE))
7354 return NULL_TREE;
7355 else
7357 tree len = c_strlen (arg, 0);
7359 if (len)
7361 /* Convert from the internal "sizetype" type to "size_t". */
7362 if (size_type_node)
7363 len = fold_convert (size_type_node, len);
7364 return len;
7367 return NULL_TREE;
7371 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7373 static tree
7374 fold_builtin_inf (tree type, int warn)
7376 REAL_VALUE_TYPE real;
7378 /* __builtin_inff is intended to be usable to define INFINITY on all
7379 targets. If an infinity is not available, INFINITY expands "to a
7380 positive constant of type float that overflows at translation
7381 time", footnote "In this case, using INFINITY will violate the
7382 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7383 Thus we pedwarn to ensure this constraint violation is
7384 diagnosed. */
7385 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7386 pedwarn (input_location, 0, "target format does not support infinity");
7388 real_inf (&real);
7389 return build_real (type, real);
7392 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7394 static tree
7395 fold_builtin_nan (tree arg, tree type, int quiet)
7397 REAL_VALUE_TYPE real;
7398 const char *str;
7400 if (!validate_arg (arg, POINTER_TYPE))
7401 return NULL_TREE;
7402 str = c_getstr (arg);
7403 if (!str)
7404 return NULL_TREE;
7406 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7407 return NULL_TREE;
7409 return build_real (type, real);
7412 /* Return true if the floating point expression T has an integer value.
7413 We also allow +Inf, -Inf and NaN to be considered integer values. */
7415 static bool
7416 integer_valued_real_p (tree t)
7418 switch (TREE_CODE (t))
7420 case FLOAT_EXPR:
7421 return true;
7423 case ABS_EXPR:
7424 case SAVE_EXPR:
7425 return integer_valued_real_p (TREE_OPERAND (t, 0));
7427 case COMPOUND_EXPR:
7428 case MODIFY_EXPR:
7429 case BIND_EXPR:
7430 return integer_valued_real_p (TREE_OPERAND (t, 1));
7432 case PLUS_EXPR:
7433 case MINUS_EXPR:
7434 case MULT_EXPR:
7435 case MIN_EXPR:
7436 case MAX_EXPR:
7437 return integer_valued_real_p (TREE_OPERAND (t, 0))
7438 && integer_valued_real_p (TREE_OPERAND (t, 1));
7440 case COND_EXPR:
7441 return integer_valued_real_p (TREE_OPERAND (t, 1))
7442 && integer_valued_real_p (TREE_OPERAND (t, 2));
7444 case REAL_CST:
7445 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7447 case NOP_EXPR:
7449 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7450 if (TREE_CODE (type) == INTEGER_TYPE)
7451 return true;
7452 if (TREE_CODE (type) == REAL_TYPE)
7453 return integer_valued_real_p (TREE_OPERAND (t, 0));
7454 break;
7457 case CALL_EXPR:
7458 switch (builtin_mathfn_code (t))
7460 CASE_FLT_FN (BUILT_IN_CEIL):
7461 CASE_FLT_FN (BUILT_IN_FLOOR):
7462 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7463 CASE_FLT_FN (BUILT_IN_RINT):
7464 CASE_FLT_FN (BUILT_IN_ROUND):
7465 CASE_FLT_FN (BUILT_IN_TRUNC):
7466 return true;
7468 CASE_FLT_FN (BUILT_IN_FMIN):
7469 CASE_FLT_FN (BUILT_IN_FMAX):
7470 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7471 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7473 default:
7474 break;
7476 break;
7478 default:
7479 break;
7481 return false;
7484 /* FNDECL is assumed to be a builtin where truncation can be propagated
7485 across (for instance floor((double)f) == (double)floorf (f)).
7486 Do the transformation for a call with argument ARG. */
7488 static tree
7489 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7491 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7493 if (!validate_arg (arg, REAL_TYPE))
7494 return NULL_TREE;
7496 /* Integer rounding functions are idempotent. */
7497 if (fcode == builtin_mathfn_code (arg))
7498 return arg;
7500 /* If argument is already integer valued, and we don't need to worry
7501 about setting errno, there's no need to perform rounding. */
7502 if (! flag_errno_math && integer_valued_real_p (arg))
7503 return arg;
7505 if (optimize)
7507 tree arg0 = strip_float_extensions (arg);
7508 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7509 tree newtype = TREE_TYPE (arg0);
7510 tree decl;
7512 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7513 && (decl = mathfn_built_in (newtype, fcode)))
7514 return fold_convert (ftype,
7515 build_call_expr (decl, 1,
7516 fold_convert (newtype, arg0)));
7518 return NULL_TREE;
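/* Illustrative sketch, not part of GCC and not compiled: user-level effect
   of the truncation-transparent fold above.  The names f and x are
   hypothetical.  */
#if 0
#include <math.h>

double
narrowed (float f)
{
  /* With optimization enabled, folded to (double) floorf (f).  */
  return floor ((double) f);
}

double
idempotent (double x)
{
  /* The outer call to an integer-rounding function is dropped:
     folded to floor (x).  */
  return floor (floor (x));
}
#endif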
7521 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7522 the argument, for instance lround((double)f) -> lroundf (f).
7523 Do the transformation for a call with argument ARG. */
7525 static tree
7526 fold_fixed_mathfn (tree fndecl, tree arg)
7528 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7530 if (!validate_arg (arg, REAL_TYPE))
7531 return NULL_TREE;
7533 /* If argument is already integer valued, and we don't need to worry
7534 about setting errno, there's no need to perform rounding. */
7535 if (! flag_errno_math && integer_valued_real_p (arg))
7536 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7538 if (optimize)
7540 tree ftype = TREE_TYPE (arg);
7541 tree arg0 = strip_float_extensions (arg);
7542 tree newtype = TREE_TYPE (arg0);
7543 tree decl;
7545 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7546 && (decl = mathfn_built_in (newtype, fcode)))
7547 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7550 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7551 sizeof (long long) == sizeof (long). */
7552 if (TYPE_PRECISION (long_long_integer_type_node)
7553 == TYPE_PRECISION (long_integer_type_node))
7555 tree newfn = NULL_TREE;
7556 switch (fcode)
7558 CASE_FLT_FN (BUILT_IN_LLCEIL):
7559 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7560 break;
7562 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7563 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7564 break;
7566 CASE_FLT_FN (BUILT_IN_LLROUND):
7567 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7568 break;
7570 CASE_FLT_FN (BUILT_IN_LLRINT):
7571 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7572 break;
7574 default:
7575 break;
7578 if (newfn)
7580 tree newcall = build_call_expr (newfn, 1, arg);
7581 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7585 return NULL_TREE;
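/* Illustrative sketch, not part of GCC and not compiled: user-level effect
   of the narrowing fold above.  The name f is hypothetical.  */
#if 0
#include <math.h>

long long
narrowed (float f)
{
  /* With optimization enabled, folded to llroundf (f); where long and
     long long have the same precision it may further be canonicalized to
     (long long) lroundf (f).  */
  return llround ((double) f);
}
#endif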
7588 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7589 return type. Return NULL_TREE if no simplification can be made. */
7591 static tree
7592 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7594 tree res;
7596 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7597 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7598 return NULL_TREE;
7600 /* Calculate the result when the argument is a constant. */
7601 if (TREE_CODE (arg) == COMPLEX_CST
7602 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7603 type, mpfr_hypot)))
7604 return res;
7606 if (TREE_CODE (arg) == COMPLEX_EXPR)
7608 tree real = TREE_OPERAND (arg, 0);
7609 tree imag = TREE_OPERAND (arg, 1);
7611 /* If either part is zero, cabs is fabs of the other. */
7612 if (real_zerop (real))
7613 return fold_build1 (ABS_EXPR, type, imag);
7614 if (real_zerop (imag))
7615 return fold_build1 (ABS_EXPR, type, real);
7617 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7618 if (flag_unsafe_math_optimizations
7619 && operand_equal_p (real, imag, OEP_PURE_SAME))
7621 const REAL_VALUE_TYPE sqrt2_trunc
7622 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7623 STRIP_NOPS (real);
7624 return fold_build2 (MULT_EXPR, type,
7625 fold_build1 (ABS_EXPR, type, real),
7626 build_real (type, sqrt2_trunc));
7630 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7631 if (TREE_CODE (arg) == NEGATE_EXPR
7632 || TREE_CODE (arg) == CONJ_EXPR)
7633 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7635 /* Don't do this when optimizing for size. */
7636 if (flag_unsafe_math_optimizations
7637 && optimize && optimize_function_for_speed_p (cfun))
7639 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7641 if (sqrtfn != NULL_TREE)
7643 tree rpart, ipart, result;
7645 arg = builtin_save_expr (arg);
7647 rpart = fold_build1 (REALPART_EXPR, type, arg);
7648 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7650 rpart = builtin_save_expr (rpart);
7651 ipart = builtin_save_expr (ipart);
7653 result = fold_build2 (PLUS_EXPR, type,
7654 fold_build2 (MULT_EXPR, type,
7655 rpart, rpart),
7656 fold_build2 (MULT_EXPR, type,
7657 ipart, ipart));
7659 return build_call_expr (sqrtfn, 1, result);
7663 return NULL_TREE;
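/* Illustrative sketch, not part of GCC and not compiled: user-level effect
   of the cabs folds above.  The names x and z are hypothetical.  */
#if 0
#include <complex.h>
#include <math.h>

double
zero_imag (double x)
{
  /* A COMPLEX_EXPR with a zero imaginary part may be folded to fabs (x).  */
  return cabs (x + 0.0 * I);
}

double
expanded (double _Complex z)
{
  /* With -funsafe-math-optimizations, when optimizing for speed, folded to
     sqrt (creal (z) * creal (z) + cimag (z) * cimag (z)).  */
  return cabs (z);
}
#endif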
7666 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7667 Return NULL_TREE if no simplification can be made. */
7669 static tree
7670 fold_builtin_sqrt (tree arg, tree type)
7673 enum built_in_function fcode;
7674 tree res;
7676 if (!validate_arg (arg, REAL_TYPE))
7677 return NULL_TREE;
7679 /* Calculate the result when the argument is a constant. */
7680 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7681 return res;
7683 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7684 fcode = builtin_mathfn_code (arg);
7685 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7687 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7688 arg = fold_build2 (MULT_EXPR, type,
7689 CALL_EXPR_ARG (arg, 0),
7690 build_real (type, dconsthalf));
7691 return build_call_expr (expfn, 1, arg);
7694 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7695 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7697 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7699 if (powfn)
7701 tree arg0 = CALL_EXPR_ARG (arg, 0);
7702 tree tree_root;
7703 /* The inner root was either sqrt or cbrt. */
7704 /* This was a conditional expression but it triggered a bug
7705 in Sun C 5.5. */
7706 REAL_VALUE_TYPE dconstroot;
7707 if (BUILTIN_SQRT_P (fcode))
7708 dconstroot = dconsthalf;
7709 else
7710 dconstroot = dconst_third ();
7712 /* Adjust for the outer root. */
7713 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7714 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7715 tree_root = build_real (type, dconstroot);
7716 return build_call_expr (powfn, 2, arg0, tree_root);
7720 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7721 if (flag_unsafe_math_optimizations
7722 && (fcode == BUILT_IN_POW
7723 || fcode == BUILT_IN_POWF
7724 || fcode == BUILT_IN_POWL))
7726 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7727 tree arg0 = CALL_EXPR_ARG (arg, 0);
7728 tree arg1 = CALL_EXPR_ARG (arg, 1);
7729 tree narg1;
7730 if (!tree_expr_nonnegative_p (arg0))
7731 arg0 = build1 (ABS_EXPR, type, arg0);
7732 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7733 build_real (type, dconsthalf));
7734 return build_call_expr (powfn, 2, arg0, narg1);
7737 return NULL_TREE;
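/* Illustrative sketch, not part of GCC and not compiled: user-level effect
   of the sqrt folds above; both require -funsafe-math-optimizations.  The
   names x and y are hypothetical.  */
#if 0
#include <math.h>

double
sqrt_of_exp (double x)
{
  return sqrt (exp (x));      /* folded to exp (x * 0.5) */
}

double
sqrt_of_pow (double x, double y)
{
  /* Folded to pow (fabs (x), y * 0.5); the fabs is omitted when x is
     known to be nonnegative.  */
  return sqrt (pow (x, y));
}
#endif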
7740 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7741 Return NULL_TREE if no simplification can be made. */
7743 static tree
7744 fold_builtin_cbrt (tree arg, tree type)
7746 const enum built_in_function fcode = builtin_mathfn_code (arg);
7747 tree res;
7749 if (!validate_arg (arg, REAL_TYPE))
7750 return NULL_TREE;
7752 /* Calculate the result when the argument is a constant. */
7753 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7754 return res;
7756 if (flag_unsafe_math_optimizations)
7758 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7759 if (BUILTIN_EXPONENT_P (fcode))
7761 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7762 const REAL_VALUE_TYPE third_trunc =
7763 real_value_truncate (TYPE_MODE (type), dconst_third ());
7764 arg = fold_build2 (MULT_EXPR, type,
7765 CALL_EXPR_ARG (arg, 0),
7766 build_real (type, third_trunc));
7767 return build_call_expr (expfn, 1, arg);
7770 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7771 if (BUILTIN_SQRT_P (fcode))
7773 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7775 if (powfn)
7777 tree arg0 = CALL_EXPR_ARG (arg, 0);
7778 tree tree_root;
7779 REAL_VALUE_TYPE dconstroot = dconst_third ();
7781 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7782 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7783 tree_root = build_real (type, dconstroot);
7784 return build_call_expr (powfn, 2, arg0, tree_root);
7788 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7789 if (BUILTIN_CBRT_P (fcode))
7791 tree arg0 = CALL_EXPR_ARG (arg, 0);
7792 if (tree_expr_nonnegative_p (arg0))
7794 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7796 if (powfn)
7798 tree tree_root;
7799 REAL_VALUE_TYPE dconstroot;
7801 real_arithmetic (&dconstroot, MULT_EXPR,
7802 dconst_third_ptr (), dconst_third_ptr ());
7803 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7804 tree_root = build_real (type, dconstroot);
7805 return build_call_expr (powfn, 2, arg0, tree_root);
7810 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7811 if (fcode == BUILT_IN_POW
7812 || fcode == BUILT_IN_POWF
7813 || fcode == BUILT_IN_POWL)
7815 tree arg00 = CALL_EXPR_ARG (arg, 0);
7816 tree arg01 = CALL_EXPR_ARG (arg, 1);
7817 if (tree_expr_nonnegative_p (arg00))
7819 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7820 const REAL_VALUE_TYPE dconstroot
7821 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7822 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7823 build_real (type, dconstroot));
7824 return build_call_expr (powfn, 2, arg00, narg01);
7828 return NULL_TREE;
7831 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7832 TYPE is the type of the return value. Return NULL_TREE if no
7833 simplification can be made. */
7835 static tree
7836 fold_builtin_cos (tree arg, tree type, tree fndecl)
7838 tree res, narg;
7840 if (!validate_arg (arg, REAL_TYPE))
7841 return NULL_TREE;
7843 /* Calculate the result when the argument is a constant. */
7844 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7845 return res;
7847 /* Optimize cos(-x) into cos (x). */
7848 if ((narg = fold_strip_sign_ops (arg)))
7849 return build_call_expr (fndecl, 1, narg);
7851 return NULL_TREE;
7854 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7855 Return NULL_TREE if no simplification can be made. */
7857 static tree
7858 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7860 if (validate_arg (arg, REAL_TYPE))
7862 tree res, narg;
7864 /* Calculate the result when the argument is a constant. */
7865 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7866 return res;
7868 /* Optimize cosh(-x) into cosh (x). */
7869 if ((narg = fold_strip_sign_ops (arg)))
7870 return build_call_expr (fndecl, 1, narg);
7873 return NULL_TREE;
7876 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7877 Return NULL_TREE if no simplification can be made. */
7879 static tree
7880 fold_builtin_tan (tree arg, tree type)
7882 enum built_in_function fcode;
7883 tree res;
7885 if (!validate_arg (arg, REAL_TYPE))
7886 return NULL_TREE;
7888 /* Calculate the result when the argument is a constant. */
7889 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7890 return res;
7892 /* Optimize tan(atan(x)) = x. */
7893 fcode = builtin_mathfn_code (arg);
7894 if (flag_unsafe_math_optimizations
7895 && (fcode == BUILT_IN_ATAN
7896 || fcode == BUILT_IN_ATANF
7897 || fcode == BUILT_IN_ATANL))
7898 return CALL_EXPR_ARG (arg, 0);
7900 return NULL_TREE;
7903 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7904 NULL_TREE if no simplification can be made. */
7906 static tree
7907 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7909 tree type;
7910 tree res, fn, call;
7912 if (!validate_arg (arg0, REAL_TYPE)
7913 || !validate_arg (arg1, POINTER_TYPE)
7914 || !validate_arg (arg2, POINTER_TYPE))
7915 return NULL_TREE;
7917 type = TREE_TYPE (arg0);
7919 /* Calculate the result when the argument is a constant. */
7920 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7921 return res;
7923 /* Canonicalize sincos to cexpi. */
7924 if (!TARGET_C99_FUNCTIONS)
7925 return NULL_TREE;
7926 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7927 if (!fn)
7928 return NULL_TREE;
7930 call = build_call_expr (fn, 1, arg0);
7931 call = builtin_save_expr (call);
7933 return build2 (COMPOUND_EXPR, void_type_node,
7934 build2 (MODIFY_EXPR, void_type_node,
7935 build_fold_indirect_ref (arg1),
7936 build1 (IMAGPART_EXPR, type, call)),
7937 build2 (MODIFY_EXPR, void_type_node,
7938 build_fold_indirect_ref (arg2),
7939 build1 (REALPART_EXPR, type, call)));
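/* Illustrative sketch, not part of GCC and not compiled: the sincos
   canonicalization above at the source level.  sincos is a GNU extension;
   the names x, s and c are hypothetical.  */
#if 0
#define _GNU_SOURCE
#include <math.h>

void
both (double x, double *s, double *c)
{
  /* On targets with a C99 runtime this becomes, in effect,
       tmp = __builtin_cexpi (x);   -- cos (x) + i*sin (x)
       *s = cimag (tmp);
       *c = creal (tmp);
     so a single call computes both results.  */
  sincos (x, s, c);
}
#endif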
7942 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7943 NULL_TREE if no simplification can be made. */
7945 static tree
7946 fold_builtin_cexp (tree arg0, tree type)
7948 tree rtype;
7949 tree realp, imagp, ifn;
7951 if (!validate_arg (arg0, COMPLEX_TYPE))
7952 return NULL_TREE;
7954 rtype = TREE_TYPE (TREE_TYPE (arg0));
7956 /* If we can figure out the real part of arg0 and it is constant zero,
7957 fold to cexpi. */
7958 if (!TARGET_C99_FUNCTIONS)
7959 return NULL_TREE;
7960 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7961 if (!ifn)
7962 return NULL_TREE;
7964 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7965 && real_zerop (realp))
7967 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7968 return build_call_expr (ifn, 1, narg);
7971 /* If we can easily decompose the real and imaginary parts, split cexp
7972 into exp (r) * cexpi (i). */
7973 if (flag_unsafe_math_optimizations
7974 && realp)
7976 tree rfn, rcall, icall;
7978 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7979 if (!rfn)
7980 return NULL_TREE;
7982 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7983 if (!imagp)
7984 return NULL_TREE;
7986 icall = build_call_expr (ifn, 1, imagp);
7987 icall = builtin_save_expr (icall);
7988 rcall = build_call_expr (rfn, 1, realp);
7989 rcall = builtin_save_expr (rcall);
7990 return fold_build2 (COMPLEX_EXPR, type,
7991 fold_build2 (MULT_EXPR, rtype,
7992 rcall,
7993 fold_build1 (REALPART_EXPR, rtype, icall)),
7994 fold_build2 (MULT_EXPR, rtype,
7995 rcall,
7996 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7999 return NULL_TREE;
8002 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8003 Return NULL_TREE if no simplification can be made. */
8005 static tree
8006 fold_builtin_trunc (tree fndecl, tree arg)
8008 if (!validate_arg (arg, REAL_TYPE))
8009 return NULL_TREE;
8011 /* Optimize trunc of constant value. */
8012 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8014 REAL_VALUE_TYPE r, x;
8015 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8017 x = TREE_REAL_CST (arg);
8018 real_trunc (&r, TYPE_MODE (type), &x);
8019 return build_real (type, r);
8022 return fold_trunc_transparent_mathfn (fndecl, arg);
8025 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8026 Return NULL_TREE if no simplification can be made. */
8028 static tree
8029 fold_builtin_floor (tree fndecl, tree arg)
8031 if (!validate_arg (arg, REAL_TYPE))
8032 return NULL_TREE;
8034 /* Optimize floor of constant value. */
8035 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8037 REAL_VALUE_TYPE x;
8039 x = TREE_REAL_CST (arg);
8040 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8042 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8043 REAL_VALUE_TYPE r;
8045 real_floor (&r, TYPE_MODE (type), &x);
8046 return build_real (type, r);
8050 /* Fold floor (x) where x is nonnegative to trunc (x). */
8051 if (tree_expr_nonnegative_p (arg))
8053 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8054 if (truncfn)
8055 return build_call_expr (truncfn, 1, arg);
8058 return fold_trunc_transparent_mathfn (fndecl, arg);
8061 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8062 Return NULL_TREE if no simplification can be made. */
8064 static tree
8065 fold_builtin_ceil (tree fndecl, tree arg)
8067 if (!validate_arg (arg, REAL_TYPE))
8068 return NULL_TREE;
8070 /* Optimize ceil of constant value. */
8071 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8073 REAL_VALUE_TYPE x;
8075 x = TREE_REAL_CST (arg);
8076 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8078 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8079 REAL_VALUE_TYPE r;
8081 real_ceil (&r, TYPE_MODE (type), &x);
8082 return build_real (type, r);
8086 return fold_trunc_transparent_mathfn (fndecl, arg);
8089 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8090 Return NULL_TREE if no simplification can be made. */
8092 static tree
8093 fold_builtin_round (tree fndecl, tree arg)
8095 if (!validate_arg (arg, REAL_TYPE))
8096 return NULL_TREE;
8098 /* Optimize round of constant value. */
8099 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8101 REAL_VALUE_TYPE x;
8103 x = TREE_REAL_CST (arg);
8104 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8106 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8107 REAL_VALUE_TYPE r;
8109 real_round (&r, TYPE_MODE (type), &x);
8110 return build_real (type, r);
8114 return fold_trunc_transparent_mathfn (fndecl, arg);
8117 /* Fold function call to builtin lround, lroundf or lroundl (or the
8118 corresponding long long versions) and other rounding functions. ARG
8119 is the argument to the call. Return NULL_TREE if no simplification
8120 can be made. */
8122 static tree
8123 fold_builtin_int_roundingfn (tree fndecl, tree arg)
8125 if (!validate_arg (arg, REAL_TYPE))
8126 return NULL_TREE;
8128 /* Optimize lround of constant value. */
8129 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8131 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8133 if (real_isfinite (&x))
8135 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8136 tree ftype = TREE_TYPE (arg);
8137 unsigned HOST_WIDE_INT lo2;
8138 HOST_WIDE_INT hi, lo;
8139 REAL_VALUE_TYPE r;
8141 switch (DECL_FUNCTION_CODE (fndecl))
8143 CASE_FLT_FN (BUILT_IN_LFLOOR):
8144 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8145 real_floor (&r, TYPE_MODE (ftype), &x);
8146 break;
8148 CASE_FLT_FN (BUILT_IN_LCEIL):
8149 CASE_FLT_FN (BUILT_IN_LLCEIL):
8150 real_ceil (&r, TYPE_MODE (ftype), &x);
8151 break;
8153 CASE_FLT_FN (BUILT_IN_LROUND):
8154 CASE_FLT_FN (BUILT_IN_LLROUND):
8155 real_round (&r, TYPE_MODE (ftype), &x);
8156 break;
8158 default:
8159 gcc_unreachable ();
8162 REAL_VALUE_TO_INT (&lo, &hi, r);
8163 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8164 return build_int_cst_wide (itype, lo2, hi);
8168 switch (DECL_FUNCTION_CODE (fndecl))
8170 CASE_FLT_FN (BUILT_IN_LFLOOR):
8171 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8172 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8173 if (tree_expr_nonnegative_p (arg))
8174 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8175 arg);
8176 break;
8177 default:;
8180 return fold_fixed_mathfn (fndecl, arg);
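/* Illustrative sketch, not part of GCC and not compiled: the compile-time
   evaluation path above for constant arguments.  */
#if 0
#include <math.h>

long
round_pos (void)
{
  return lround (2.5);    /* real_round: folds to the constant 3L */
}

long
round_neg (void)
{
  return lround (-2.5);   /* halfway cases round away from zero: folds to -3L */
}
#endif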
8183 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8184 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8185 the argument to the call. Return NULL_TREE if no simplification can
8186 be made. */
8188 static tree
8189 fold_builtin_bitop (tree fndecl, tree arg)
8191 if (!validate_arg (arg, INTEGER_TYPE))
8192 return NULL_TREE;
8194 /* Optimize for constant argument. */
8195 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8197 HOST_WIDE_INT hi, width, result;
8198 unsigned HOST_WIDE_INT lo;
8199 tree type;
8201 type = TREE_TYPE (arg);
8202 width = TYPE_PRECISION (type);
8203 lo = TREE_INT_CST_LOW (arg);
8205 /* Clear all the bits that are beyond the type's precision. */
8206 if (width > HOST_BITS_PER_WIDE_INT)
8208 hi = TREE_INT_CST_HIGH (arg);
8209 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8210 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8212 else
8214 hi = 0;
8215 if (width < HOST_BITS_PER_WIDE_INT)
8216 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8219 switch (DECL_FUNCTION_CODE (fndecl))
8221 CASE_INT_FN (BUILT_IN_FFS):
8222 if (lo != 0)
8223 result = exact_log2 (lo & -lo) + 1;
8224 else if (hi != 0)
8225 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8226 else
8227 result = 0;
8228 break;
8230 CASE_INT_FN (BUILT_IN_CLZ):
8231 if (hi != 0)
8232 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8233 else if (lo != 0)
8234 result = width - floor_log2 (lo) - 1;
8235 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8236 result = width;
8237 break;
8239 CASE_INT_FN (BUILT_IN_CTZ):
8240 if (lo != 0)
8241 result = exact_log2 (lo & -lo);
8242 else if (hi != 0)
8243 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8244 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8245 result = width;
8246 break;
8248 CASE_INT_FN (BUILT_IN_POPCOUNT):
8249 result = 0;
8250 while (lo)
8251 result++, lo &= lo - 1;
8252 while (hi)
8253 result++, hi &= hi - 1;
8254 break;
8256 CASE_INT_FN (BUILT_IN_PARITY):
8257 result = 0;
8258 while (lo)
8259 result++, lo &= lo - 1;
8260 while (hi)
8261 result++, hi &= hi - 1;
8262 result &= 1;
8263 break;
8265 default:
8266 gcc_unreachable ();
8269 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8272 return NULL_TREE;
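/* Standalone sketch, not part of GCC and not compiled: re-derives the
   popcount/parity arithmetic above on the host.  Clearing the lowest set
   bit with x &= x - 1 counts one bit per iteration, exactly like the
   CASE_INT_FN (BUILT_IN_POPCOUNT) loop.  The function name and test values
   are made up.  */
#if 0
#include <assert.h>

static int
popcount_by_clearing (unsigned long x)
{
  int n = 0;
  while (x)
    {
      x &= x - 1;   /* clear the lowest set bit */
      n++;
    }
  return n;
}

int
main (void)
{
  assert (popcount_by_clearing (0xf0UL) == 4);
  assert (popcount_by_clearing (0xf0UL) == __builtin_popcountl (0xf0UL));
  /* Parity is the low bit of the same count.  */
  assert ((popcount_by_clearing (0x7UL) & 1) == __builtin_parityl (0x7UL));
  return 0;
}
#endif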
8275 /* Fold function call to builtin_bswap and the long and long long
8276 variants. Return NULL_TREE if no simplification can be made. */
8277 static tree
8278 fold_builtin_bswap (tree fndecl, tree arg)
8280 if (! validate_arg (arg, INTEGER_TYPE))
8281 return NULL_TREE;
8283 /* Optimize constant value. */
8284 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8286 HOST_WIDE_INT hi, width, r_hi = 0;
8287 unsigned HOST_WIDE_INT lo, r_lo = 0;
8288 tree type;
8290 type = TREE_TYPE (arg);
8291 width = TYPE_PRECISION (type);
8292 lo = TREE_INT_CST_LOW (arg);
8293 hi = TREE_INT_CST_HIGH (arg);
8295 switch (DECL_FUNCTION_CODE (fndecl))
8297 case BUILT_IN_BSWAP32:
8298 case BUILT_IN_BSWAP64:
8300 int s;
8302 for (s = 0; s < width; s += 8)
8304 int d = width - s - 8;
8305 unsigned HOST_WIDE_INT byte;
8307 if (s < HOST_BITS_PER_WIDE_INT)
8308 byte = (lo >> s) & 0xff;
8309 else
8310 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8312 if (d < HOST_BITS_PER_WIDE_INT)
8313 r_lo |= byte << d;
8314 else
8315 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8319 break;
8321 default:
8322 gcc_unreachable ();
8325 if (width < HOST_BITS_PER_WIDE_INT)
8326 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8327 else
8328 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8331 return NULL_TREE;
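/* Standalone sketch, not part of GCC and not compiled: the byte-reversal
   loop above, specialized to 32 bits on the host.  The function name is
   made up.  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint32_t
bswap32_by_loop (uint32_t x)
{
  uint32_t r = 0;
  int s;

  for (s = 0; s < 32; s += 8)
    {
      int d = 32 - s - 8;               /* mirrored destination position */
      uint32_t byte = (x >> s) & 0xff;  /* extract one source byte */
      r |= byte << d;
    }
  return r;
}

int
main (void)
{
  assert (bswap32_by_loop (0x12345678u) == 0x78563412u);
  assert (bswap32_by_loop (0x12345678u) == __builtin_bswap32 (0x12345678u));
  return 0;
}
#endif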
8334 /* A subroutine of fold_builtin to fold the various logarithmic
8335 functions. Return NULL_TREE if no simplification can be made.
8336 FUNC is the corresponding MPFR logarithm function. */
8338 static tree
8339 fold_builtin_logarithm (tree fndecl, tree arg,
8340 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8342 if (validate_arg (arg, REAL_TYPE))
8344 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8345 tree res;
8346 const enum built_in_function fcode = builtin_mathfn_code (arg);
8348 /* Calculate the result when the argument is a constant. */
8349 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8350 return res;
8352 /* Special case, optimize logN(expN(x)) = x. */
8353 if (flag_unsafe_math_optimizations
8354 && ((func == mpfr_log
8355 && (fcode == BUILT_IN_EXP
8356 || fcode == BUILT_IN_EXPF
8357 || fcode == BUILT_IN_EXPL))
8358 || (func == mpfr_log2
8359 && (fcode == BUILT_IN_EXP2
8360 || fcode == BUILT_IN_EXP2F
8361 || fcode == BUILT_IN_EXP2L))
8362 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8363 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8365 /* Optimize logN(func()) for various exponential functions. We
8366 want to determine the value "x" and the power "exponent" in
8367 order to transform logN(x**exponent) into exponent*logN(x). */
8368 if (flag_unsafe_math_optimizations)
8370 tree exponent = 0, x = 0;
8372 switch (fcode)
8374 CASE_FLT_FN (BUILT_IN_EXP):
8375 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8376 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8377 dconst_e ()));
8378 exponent = CALL_EXPR_ARG (arg, 0);
8379 break;
8380 CASE_FLT_FN (BUILT_IN_EXP2):
8381 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8382 x = build_real (type, dconst2);
8383 exponent = CALL_EXPR_ARG (arg, 0);
8384 break;
8385 CASE_FLT_FN (BUILT_IN_EXP10):
8386 CASE_FLT_FN (BUILT_IN_POW10):
8387 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8389 REAL_VALUE_TYPE dconst10;
8390 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8391 x = build_real (type, dconst10);
8393 exponent = CALL_EXPR_ARG (arg, 0);
8394 break;
8395 CASE_FLT_FN (BUILT_IN_SQRT):
8396 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8397 x = CALL_EXPR_ARG (arg, 0);
8398 exponent = build_real (type, dconsthalf);
8399 break;
8400 CASE_FLT_FN (BUILT_IN_CBRT):
8401 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8402 x = CALL_EXPR_ARG (arg, 0);
8403 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8404 dconst_third ()));
8405 break;
8406 CASE_FLT_FN (BUILT_IN_POW):
8407 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8408 x = CALL_EXPR_ARG (arg, 0);
8409 exponent = CALL_EXPR_ARG (arg, 1);
8410 break;
8411 default:
8412 break;
8415 /* Now perform the optimization. */
8416 if (x && exponent)
8418 tree logfn = build_call_expr (fndecl, 1, x);
8419 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8424 return NULL_TREE;
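/* Illustrative sketch, not part of GCC and not compiled: user-level effect
   of the logarithm folds above; all require -funsafe-math-optimizations.
   The names x and y are hypothetical.  */
#if 0
#include <math.h>

double
log_of_exp (double x)
{
  return log (exp (x));      /* folded to x */
}

double
log_of_pow (double x, double y)
{
  return log (pow (x, y));   /* folded to y * log (x) */
}

double
log2_of_sqrt (double x)
{
  return log2 (sqrt (x));    /* folded to 0.5 * log2 (x) */
}
#endif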
8427 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8428 NULL_TREE if no simplification can be made. */
8430 static tree
8431 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8433 tree res, narg0, narg1;
8435 if (!validate_arg (arg0, REAL_TYPE)
8436 || !validate_arg (arg1, REAL_TYPE))
8437 return NULL_TREE;
8439 /* Calculate the result when the argument is a constant. */
8440 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8441 return res;
8443 /* If either argument to hypot has a negate or abs, strip that off.
8444 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8445 narg0 = fold_strip_sign_ops (arg0);
8446 narg1 = fold_strip_sign_ops (arg1);
8447 if (narg0 || narg1)
8449 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8450 narg1 ? narg1 : arg1);
8453 /* If either argument is zero, hypot is fabs of the other. */
8454 if (real_zerop (arg0))
8455 return fold_build1 (ABS_EXPR, type, arg1);
8456 else if (real_zerop (arg1))
8457 return fold_build1 (ABS_EXPR, type, arg0);
8459 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8460 if (flag_unsafe_math_optimizations
8461 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8463 const REAL_VALUE_TYPE sqrt2_trunc
8464 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8465 return fold_build2 (MULT_EXPR, type,
8466 fold_build1 (ABS_EXPR, type, arg0),
8467 build_real (type, sqrt2_trunc));
8470 return NULL_TREE;
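/* Illustrative sketch, not part of GCC and not compiled: user-level effect
   of the hypot folds above.  The names x and y are hypothetical.  */
#if 0
#include <math.h>

double
zero_arg (double x)
{
  return hypot (x, 0.0);          /* folded to fabs (x) */
}

double
sign_stripped (double x, double y)
{
  return hypot (-x, fabs (y));    /* folded to hypot (x, y) */
}

double
equal_args (double x)
{
  /* With -funsafe-math-optimizations, folded to fabs (x) * sqrt (2).  */
  return hypot (x, x);
}
#endif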
8474 /* Fold a builtin function call to pow, powf, or powl. Return
8475 NULL_TREE if no simplification can be made. */
8476 static tree
8477 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8479 tree res;
8481 if (!validate_arg (arg0, REAL_TYPE)
8482 || !validate_arg (arg1, REAL_TYPE))
8483 return NULL_TREE;
8485 /* Calculate the result when the argument is a constant. */
8486 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8487 return res;
8489 /* Optimize pow(1.0,y) = 1.0. */
8490 if (real_onep (arg0))
8491 return omit_one_operand (type, build_real (type, dconst1), arg1);
8493 if (TREE_CODE (arg1) == REAL_CST
8494 && !TREE_OVERFLOW (arg1))
8496 REAL_VALUE_TYPE cint;
8497 REAL_VALUE_TYPE c;
8498 HOST_WIDE_INT n;
8500 c = TREE_REAL_CST (arg1);
8502 /* Optimize pow(x,0.0) = 1.0. */
8503 if (REAL_VALUES_EQUAL (c, dconst0))
8504 return omit_one_operand (type, build_real (type, dconst1),
8505 arg0);
8507 /* Optimize pow(x,1.0) = x. */
8508 if (REAL_VALUES_EQUAL (c, dconst1))
8509 return arg0;
8511 /* Optimize pow(x,-1.0) = 1.0/x. */
8512 if (REAL_VALUES_EQUAL (c, dconstm1))
8513 return fold_build2 (RDIV_EXPR, type,
8514 build_real (type, dconst1), arg0);
8516 /* Optimize pow(x,0.5) = sqrt(x). */
8517 if (flag_unsafe_math_optimizations
8518 && REAL_VALUES_EQUAL (c, dconsthalf))
8520 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8522 if (sqrtfn != NULL_TREE)
8523 return build_call_expr (sqrtfn, 1, arg0);
8526 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8527 if (flag_unsafe_math_optimizations)
8529 const REAL_VALUE_TYPE dconstroot
8530 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8532 if (REAL_VALUES_EQUAL (c, dconstroot))
8534 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8535 if (cbrtfn != NULL_TREE)
8536 return build_call_expr (cbrtfn, 1, arg0);
8540 /* Check for an integer exponent. */
8541 n = real_to_integer (&c);
8542 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8543 if (real_identical (&c, &cint))
8545 /* Attempt to evaluate pow at compile-time, unless this should
8546 raise an exception. */
8547 if (TREE_CODE (arg0) == REAL_CST
8548 && !TREE_OVERFLOW (arg0)
8549 && (n > 0
8550 || (!flag_trapping_math && !flag_errno_math)
8551 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8553 REAL_VALUE_TYPE x;
8554 bool inexact;
8556 x = TREE_REAL_CST (arg0);
8557 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8558 if (flag_unsafe_math_optimizations || !inexact)
8559 return build_real (type, x);
8562 /* Strip sign ops from even integer powers. */
8563 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8565 tree narg0 = fold_strip_sign_ops (arg0);
8566 if (narg0)
8567 return build_call_expr (fndecl, 2, narg0, arg1);
8572 if (flag_unsafe_math_optimizations)
8574 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8576 /* Optimize pow(expN(x),y) = expN(x*y). */
8577 if (BUILTIN_EXPONENT_P (fcode))
8579 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8580 tree arg = CALL_EXPR_ARG (arg0, 0);
8581 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8582 return build_call_expr (expfn, 1, arg);
8585 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8586 if (BUILTIN_SQRT_P (fcode))
8588 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8589 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8590 build_real (type, dconsthalf));
8591 return build_call_expr (fndecl, 2, narg0, narg1);
8594 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8595 if (BUILTIN_CBRT_P (fcode))
8597 tree arg = CALL_EXPR_ARG (arg0, 0);
8598 if (tree_expr_nonnegative_p (arg))
8600 const REAL_VALUE_TYPE dconstroot
8601 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8602 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8603 build_real (type, dconstroot));
8604 return build_call_expr (fndecl, 2, arg, narg1);
8608 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8609 if (fcode == BUILT_IN_POW
8610 || fcode == BUILT_IN_POWF
8611 || fcode == BUILT_IN_POWL)
8613 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8614 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8615 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8616 return build_call_expr (fndecl, 2, arg00, narg1);
8620 return NULL_TREE;
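/* Illustrative sketch, not part of GCC and not compiled: user-level effect
   of the pow folds above.  The sqrt and nested-pow rewrites additionally
   require -funsafe-math-optimizations.  The names x, y and z are
   hypothetical.  */
#if 0
#include <math.h>

double
pow_one (double x)
{
  return pow (x, 1.0);          /* folded to x */
}

double
pow_minus_one (double x)
{
  return pow (x, -1.0);         /* folded to 1.0 / x */
}

double
pow_half (double x)
{
  return pow (x, 0.5);          /* folded to sqrt (x) */
}

double
pow_of_pow (double x, double y, double z)
{
  return pow (pow (x, y), z);   /* folded to pow (x, y * z) */
}
#endif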
8623 /* Fold a builtin function call to powi, powif, or powil with arguments
8624 ARG0 and ARG1. Return NULL_TREE if no simplification can be made. */
8625 static tree
8626 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8627 tree arg0, tree arg1, tree type)
8629 if (!validate_arg (arg0, REAL_TYPE)
8630 || !validate_arg (arg1, INTEGER_TYPE))
8631 return NULL_TREE;
8633 /* Optimize pow(1.0,y) = 1.0. */
8634 if (real_onep (arg0))
8635 return omit_one_operand (type, build_real (type, dconst1), arg1);
8637 if (host_integerp (arg1, 0))
8639 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8641 /* Evaluate powi at compile-time. */
8642 if (TREE_CODE (arg0) == REAL_CST
8643 && !TREE_OVERFLOW (arg0))
8645 REAL_VALUE_TYPE x;
8646 x = TREE_REAL_CST (arg0);
8647 real_powi (&x, TYPE_MODE (type), &x, c);
8648 return build_real (type, x);
8651 /* Optimize pow(x,0) = 1.0. */
8652 if (c == 0)
8653 return omit_one_operand (type, build_real (type, dconst1),
8654 arg0);
8656 /* Optimize pow(x,1) = x. */
8657 if (c == 1)
8658 return arg0;
8660 /* Optimize pow(x,-1) = 1.0/x. */
8661 if (c == -1)
8662 return fold_build2 (RDIV_EXPR, type,
8663 build_real (type, dconst1), arg0);
8666 return NULL_TREE;
8669 /* A subroutine of fold_builtin to fold the various exponent
8670 functions. Return NULL_TREE if no simplification can be made.
8671 FUNC is the corresponding MPFR exponent function. */
8673 static tree
8674 fold_builtin_exponent (tree fndecl, tree arg,
8675 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8677 if (validate_arg (arg, REAL_TYPE))
8679 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8680 tree res;
8682 /* Calculate the result when the argument is a constant. */
8683 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8684 return res;
8686 /* Optimize expN(logN(x)) = x. */
8687 if (flag_unsafe_math_optimizations)
8689 const enum built_in_function fcode = builtin_mathfn_code (arg);
8691 if ((func == mpfr_exp
8692 && (fcode == BUILT_IN_LOG
8693 || fcode == BUILT_IN_LOGF
8694 || fcode == BUILT_IN_LOGL))
8695 || (func == mpfr_exp2
8696 && (fcode == BUILT_IN_LOG2
8697 || fcode == BUILT_IN_LOG2F
8698 || fcode == BUILT_IN_LOG2L))
8699 || (func == mpfr_exp10
8700 && (fcode == BUILT_IN_LOG10
8701 || fcode == BUILT_IN_LOG10F
8702 || fcode == BUILT_IN_LOG10L)))
8703 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8707 return NULL_TREE;
8710 /* Return true if VAR is a VAR_DECL or a component thereof. */
8712 static bool
8713 var_decl_component_p (tree var)
8715 tree inner = var;
8716 while (handled_component_p (inner))
8717 inner = TREE_OPERAND (inner, 0);
8718 return SSA_VAR_P (inner);
8721 /* Fold function call to builtin memset. Return
8722 NULL_TREE if no simplification can be made. */
8724 static tree
8725 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8727 tree var, ret;
8728 unsigned HOST_WIDE_INT length, cval;
8730 if (! validate_arg (dest, POINTER_TYPE)
8731 || ! validate_arg (c, INTEGER_TYPE)
8732 || ! validate_arg (len, INTEGER_TYPE))
8733 return NULL_TREE;
8735 if (! host_integerp (len, 1))
8736 return NULL_TREE;
8738 /* If the LEN parameter is zero, return DEST. */
8739 if (integer_zerop (len))
8740 return omit_one_operand (type, dest, c);
8742 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8743 return NULL_TREE;
8745 var = dest;
8746 STRIP_NOPS (var);
8747 if (TREE_CODE (var) != ADDR_EXPR)
8748 return NULL_TREE;
8750 var = TREE_OPERAND (var, 0);
8751 if (TREE_THIS_VOLATILE (var))
8752 return NULL_TREE;
8754 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8755 && !POINTER_TYPE_P (TREE_TYPE (var)))
8756 return NULL_TREE;
8758 if (! var_decl_component_p (var))
8759 return NULL_TREE;
8761 length = tree_low_cst (len, 1);
8762 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8763 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8764 < (int) length)
8765 return NULL_TREE;
8767 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8768 return NULL_TREE;
8770 if (integer_zerop (c))
8771 cval = 0;
8772 else
8774 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8775 return NULL_TREE;
8777 cval = tree_low_cst (c, 1);
8778 cval &= 0xff;
8779 cval |= cval << 8;
8780 cval |= cval << 16;
8781 cval |= (cval << 31) << 1;
8784 ret = build_int_cst_type (TREE_TYPE (var), cval);
8785 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8786 if (ignore)
8787 return ret;
8789 return omit_one_operand (type, dest, ret);
8792 /* Fold function call to builtin bzero. Return
8793 NULL_TREE if no simplification can be made. */
8795 static tree
8796 fold_builtin_bzero (tree dest, tree size, bool ignore)
8798 if (! validate_arg (dest, POINTER_TYPE)
8799 || ! validate_arg (size, INTEGER_TYPE))
8800 return NULL_TREE;
8802 if (!ignore)
8803 return NULL_TREE;
8805 /* New argument list transforming bzero(ptr x, int y) to
8806 memset(ptr x, int 0, size_t y). This is done this way
8807 so that if it isn't expanded inline, we fall back to
8808 calling bzero instead of memset. */
8810 return fold_builtin_memset (dest, integer_zero_node,
8811 fold_convert (sizetype, size),
8812 void_type_node, ignore);
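/* Standalone sketch, not part of GCC and not compiled: the byte-replication
   arithmetic used by fold_builtin_memset above, plus its user-visible
   effect.  The split shift mirrors the source, which must avoid shifting by
   the full width of HOST_WIDE_INT.  The function names are made up.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

static uint64_t
replicate_byte (unsigned int c)
{
  uint64_t cval = c & 0xff;
  cval |= cval << 8;
  cval |= cval << 16;
  cval |= (cval << 31) << 1;
  return cval;
}

int
main (void)
{
  uint32_t w;

  assert (replicate_byte (0xab) == 0xababababababababULL);
  /* For a small, fixed-length store the fold turns this into, in effect,
     the single assignment w = 0xabababab.  */
  memset (&w, 0xab, sizeof w);
  assert (w == 0xabababab);
  return 0;
}
#endif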
8815 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8816 NULL_TREE if no simplification can be made.
8817 If ENDP is 0, return DEST (like memcpy).
8818 If ENDP is 1, return DEST+LEN (like mempcpy).
8819 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8820 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8821 (memmove). */
8823 static tree
8824 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8826 tree destvar, srcvar, expr;
8828 if (! validate_arg (dest, POINTER_TYPE)
8829 || ! validate_arg (src, POINTER_TYPE)
8830 || ! validate_arg (len, INTEGER_TYPE))
8831 return NULL_TREE;
8833 /* If the LEN parameter is zero, return DEST. */
8834 if (integer_zerop (len))
8835 return omit_one_operand (type, dest, src);
8837 /* If SRC and DEST are the same (and not volatile), return
8838 DEST{,+LEN,+LEN-1}. */
8839 if (operand_equal_p (src, dest, 0))
8840 expr = len;
8841 else
8843 tree srctype, desttype;
8844 int src_align, dest_align;
8846 if (endp == 3)
8848 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8849 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8851 /* Both DEST and SRC must be pointer types.
8852 ??? This is what old code did. Is the testing for pointer types
8853 really mandatory?
8855 If either SRC is readonly or length is 1, we can use memcpy. */
8856 if (!dest_align || !src_align)
8857 return NULL_TREE;
8858 if (readonly_data_expr (src)
8859 || (host_integerp (len, 1)
8860 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8861 >= tree_low_cst (len, 1))))
8863 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8864 if (!fn)
8865 return NULL_TREE;
8866 return build_call_expr (fn, 3, dest, src, len);
8869 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8870 srcvar = build_fold_indirect_ref (src);
8871 destvar = build_fold_indirect_ref (dest);
8872 if (srcvar
8873 && !TREE_THIS_VOLATILE (srcvar)
8874 && destvar
8875 && !TREE_THIS_VOLATILE (destvar))
8877 tree src_base, dest_base, fn;
8878 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8879 HOST_WIDE_INT size = -1;
8880 HOST_WIDE_INT maxsize = -1;
8882 src_base = srcvar;
8883 if (handled_component_p (src_base))
8884 src_base = get_ref_base_and_extent (src_base, &src_offset,
8885 &size, &maxsize);
8886 dest_base = destvar;
8887 if (handled_component_p (dest_base))
8888 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8889 &size, &maxsize);
8890 if (host_integerp (len, 1))
8892 maxsize = tree_low_cst (len, 1);
8893 if (maxsize
8894 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8895 maxsize = -1;
8896 else
8897 maxsize *= BITS_PER_UNIT;
8899 else
8900 maxsize = -1;
8901 if (SSA_VAR_P (src_base)
8902 && SSA_VAR_P (dest_base))
8904 if (operand_equal_p (src_base, dest_base, 0)
8905 && ranges_overlap_p (src_offset, maxsize,
8906 dest_offset, maxsize))
8907 return NULL_TREE;
8909 else if (TREE_CODE (src_base) == INDIRECT_REF
8910 && TREE_CODE (dest_base) == INDIRECT_REF)
8912 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8913 TREE_OPERAND (dest_base, 0), 0)
8914 || ranges_overlap_p (src_offset, maxsize,
8915 dest_offset, maxsize))
8916 return NULL_TREE;
8918 else
8919 return NULL_TREE;
8921 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8922 if (!fn)
8923 return NULL_TREE;
8924 return build_call_expr (fn, 3, dest, src, len);
8926 return NULL_TREE;
8929 if (!host_integerp (len, 0))
8930 return NULL_TREE;
8931 /* FIXME:
8932 This logic loses for arguments like (type *)malloc (sizeof (type)),
8933 since we strip the casts down to the VOID return value of malloc.
8934 Perhaps we ought to inherit the type from the non-VOID argument here? */
8935 STRIP_NOPS (src);
8936 STRIP_NOPS (dest);
8937 srctype = TREE_TYPE (TREE_TYPE (src));
8938 desttype = TREE_TYPE (TREE_TYPE (dest));
8939 if (!srctype || !desttype
8940 || !TYPE_SIZE_UNIT (srctype)
8941 || !TYPE_SIZE_UNIT (desttype)
8942 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8943 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8944 || TYPE_VOLATILE (srctype)
8945 || TYPE_VOLATILE (desttype))
8946 return NULL_TREE;
8948 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8949 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8950 if (dest_align < (int) TYPE_ALIGN (desttype)
8951 || src_align < (int) TYPE_ALIGN (srctype))
8952 return NULL_TREE;
8954 if (!ignore)
8955 dest = builtin_save_expr (dest);
8957 srcvar = NULL_TREE;
8958 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8960 srcvar = build_fold_indirect_ref (src);
8961 if (TREE_THIS_VOLATILE (srcvar))
8962 return NULL_TREE;
8963 else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8964 srcvar = NULL_TREE;
8965 /* With memcpy, it is possible to bypass aliasing rules, so without
8966 this check, e.g. execute/20060930-2.c would be misoptimized,
8967 because it uses a conflicting alias set to hold the argument for the
8968 memcpy call. This check is probably unnecessary with
8969 -fno-strict-aliasing. Similarly for destvar. See also
8970 PR29286. */
8971 else if (!var_decl_component_p (srcvar))
8972 srcvar = NULL_TREE;
8975 destvar = NULL_TREE;
8976 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8978 destvar = build_fold_indirect_ref (dest);
8979 if (TREE_THIS_VOLATILE (destvar))
8980 return NULL_TREE;
8981 else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8982 destvar = NULL_TREE;
8983 else if (!var_decl_component_p (destvar))
8984 destvar = NULL_TREE;
8987 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8988 return NULL_TREE;
8990 if (srcvar == NULL_TREE)
8992 tree srcptype;
8993 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8994 return NULL_TREE;
8996 srctype = build_qualified_type (desttype, 0);
8997 if (src_align < (int) TYPE_ALIGN (srctype))
8999 if (AGGREGATE_TYPE_P (srctype)
9000 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
9001 return NULL_TREE;
9003 srctype = build_variant_type_copy (srctype);
9004 TYPE_ALIGN (srctype) = src_align;
9005 TYPE_USER_ALIGN (srctype) = 1;
9006 TYPE_PACKED (srctype) = 1;
9008 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
9009 src = fold_convert (srcptype, src);
9010 srcvar = build_fold_indirect_ref (src);
9012 else if (destvar == NULL_TREE)
9014 tree destptype;
9015 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
9016 return NULL_TREE;
9018 desttype = build_qualified_type (srctype, 0);
9019 if (dest_align < (int) TYPE_ALIGN (desttype))
9021 if (AGGREGATE_TYPE_P (desttype)
9022 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
9023 return NULL_TREE;
9025 desttype = build_variant_type_copy (desttype);
9026 TYPE_ALIGN (desttype) = dest_align;
9027 TYPE_USER_ALIGN (desttype) = 1;
9028 TYPE_PACKED (desttype) = 1;
9030 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
9031 dest = fold_convert (destptype, dest);
9032 destvar = build_fold_indirect_ref (dest);
9035 if (srctype == desttype
9036 || (gimple_in_ssa_p (cfun)
9037 && useless_type_conversion_p (desttype, srctype)))
9038 expr = srcvar;
9039 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
9040 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
9041 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
9042 || POINTER_TYPE_P (TREE_TYPE (destvar))))
9043 expr = fold_convert (TREE_TYPE (destvar), srcvar);
9044 else
9045 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
9046 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
9049 if (ignore)
9050 return expr;
9052 if (endp == 0 || endp == 3)
9053 return omit_one_operand (type, dest, expr);
9055 if (expr == len)
9056 expr = NULL_TREE;
9058 if (endp == 2)
9059 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
9060 ssize_int (1));
9062 len = fold_convert (sizetype, len);
9063 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
9064 dest = fold_convert (type, dest);
9065 if (expr)
9066 dest = omit_one_operand (type, dest, expr);
9067 return dest;
9070 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9071 If LEN is not NULL, it represents the length of the string to be
9072 copied. Return NULL_TREE if no simplification can be made. */
9074 tree
9075 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
9077 tree fn;
9079 if (!validate_arg (dest, POINTER_TYPE)
9080 || !validate_arg (src, POINTER_TYPE))
9081 return NULL_TREE;
9083 /* If SRC and DEST are the same (and not volatile), return DEST. */
9084 if (operand_equal_p (src, dest, 0))
9085 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
9087 if (optimize_function_for_size_p (cfun))
9088 return NULL_TREE;
9090 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9091 if (!fn)
9092 return NULL_TREE;
9094 if (!len)
9096 len = c_strlen (src, 1);
9097 if (! len || TREE_SIDE_EFFECTS (len))
9098 return NULL_TREE;
9101 len = size_binop (PLUS_EXPR, len, ssize_int (1));
9102 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9103 build_call_expr (fn, 3, dest, src, len));
9106 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9107 If SLEN is not NULL, it represents the length of the source string.
9108 Return NULL_TREE if no simplification can be made. */
9110 tree
9111 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
9113 tree fn;
9115 if (!validate_arg (dest, POINTER_TYPE)
9116 || !validate_arg (src, POINTER_TYPE)
9117 || !validate_arg (len, INTEGER_TYPE))
9118 return NULL_TREE;
9120 /* If the LEN parameter is zero, return DEST. */
9121 if (integer_zerop (len))
9122 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9124 /* We can't compare slen with len as constants below if len is not a
9125 constant. */
9126 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9127 return NULL_TREE;
9129 if (!slen)
9130 slen = c_strlen (src, 1);
9132 /* Now, we must be passed a constant src ptr parameter. */
9133 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9134 return NULL_TREE;
9136 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
9138 /* We do not support simplification of this case, though we do
9139 support it when expanding trees into RTL. */
9140 /* FIXME: generate a call to __builtin_memset. */
9141 if (tree_int_cst_lt (slen, len))
9142 return NULL_TREE;
9144 /* OK, transform into builtin memcpy. */
9145 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9146 if (!fn)
9147 return NULL_TREE;
9148 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9149 build_call_expr (fn, 3, dest, src, len));
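/* Illustrative sketch, not part of GCC and not compiled: user-level effect
   of the strcpy/strncpy folds above.  The name dst is hypothetical.  */
#if 0
#include <string.h>

char *
copy_literal (char *dst)
{
  /* When not optimizing for size, the source length is known, so this is
     folded to memcpy (dst, "hello", 6) -- strlen plus the trailing NUL.  */
  return strcpy (dst, "hello");
}

char *
copy_bounded (char *dst)
{
  /* The constant bound does not exceed strlen ("hello") + 1, so no zero
     padding is needed and this becomes memcpy (dst, "hello", 4).  */
  return strncpy (dst, "hello", 4);
}
#endif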
9152 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9153 arguments to the call, and TYPE is its return type.
9154 Return NULL_TREE if no simplification can be made. */
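/* Illustrative example (placeholder operands): with constant arguments the
   call is evaluated at compile time, so memchr ("hello", 'l', 6) folds to
   the address "hello" + 2, and memchr ("hello", 'z', 6) folds to a null
   pointer of the argument's type.  */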
9156 static tree
9157 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
9159 if (!validate_arg (arg1, POINTER_TYPE)
9160 || !validate_arg (arg2, INTEGER_TYPE)
9161 || !validate_arg (len, INTEGER_TYPE))
9162 return NULL_TREE;
9163 else
9165 const char *p1;
9167 if (TREE_CODE (arg2) != INTEGER_CST
9168 || !host_integerp (len, 1))
9169 return NULL_TREE;
9171 p1 = c_getstr (arg1);
9172 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9174 char c;
9175 const char *r;
9176 tree tem;
9178 if (target_char_cast (arg2, &c))
9179 return NULL_TREE;
9181 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9183 if (r == NULL)
9184 return build_int_cst (TREE_TYPE (arg1), 0);
9186 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9187 size_int (r - p1));
9188 return fold_convert (type, tem);
9190 return NULL_TREE;
9194 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9195 Return NULL_TREE if no simplification can be made. */
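/* Illustrative examples (placeholder operands): memcmp (p, p, n) folds to 0
   while preserving side effects of N, constant operands such as
   memcmp ("abc", "abd", 3) fold to -1 at compile time, and a length of one
   folds to the difference of the two first bytes read as unsigned char.  */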
9197 static tree
9198 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
9200 const char *p1, *p2;
9202 if (!validate_arg (arg1, POINTER_TYPE)
9203 || !validate_arg (arg2, POINTER_TYPE)
9204 || !validate_arg (len, INTEGER_TYPE))
9205 return NULL_TREE;
9207 /* If the LEN parameter is zero, return zero. */
9208 if (integer_zerop (len))
9209 return omit_two_operands (integer_type_node, integer_zero_node,
9210 arg1, arg2);
9212 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9213 if (operand_equal_p (arg1, arg2, 0))
9214 return omit_one_operand (integer_type_node, integer_zero_node, len);
9216 p1 = c_getstr (arg1);
9217 p2 = c_getstr (arg2);
9219 /* If all arguments are constant, and the value of len is not greater
9220 than the lengths of arg1 and arg2, evaluate at compile-time. */
9221 if (host_integerp (len, 1) && p1 && p2
9222 && compare_tree_int (len, strlen (p1) + 1) <= 0
9223 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9225 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9227 if (r > 0)
9228 return integer_one_node;
9229 else if (r < 0)
9230 return integer_minus_one_node;
9231 else
9232 return integer_zero_node;
9235 /* If the LEN parameter is one, return an expression corresponding to
9236 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9237 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9239 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9240 tree cst_uchar_ptr_node
9241 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9243 tree ind1 = fold_convert (integer_type_node,
9244 build1 (INDIRECT_REF, cst_uchar_node,
9245 fold_convert (cst_uchar_ptr_node,
9246 arg1)));
9247 tree ind2 = fold_convert (integer_type_node,
9248 build1 (INDIRECT_REF, cst_uchar_node,
9249 fold_convert (cst_uchar_ptr_node,
9250 arg2)));
9251 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9254 return NULL_TREE;
9257 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9258 Return NULL_TREE if no simplification can be made. */
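/* Illustrative examples (placeholder operands): strcmp (s, s) folds to 0,
   two constant strings are compared at compile time yielding -1, 0 or 1,
   and strcmp (s, "") folds to *(const unsigned char *) s.  */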
9260 static tree
9261 fold_builtin_strcmp (tree arg1, tree arg2)
9263 const char *p1, *p2;
9265 if (!validate_arg (arg1, POINTER_TYPE)
9266 || !validate_arg (arg2, POINTER_TYPE))
9267 return NULL_TREE;
9269 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9270 if (operand_equal_p (arg1, arg2, 0))
9271 return integer_zero_node;
9273 p1 = c_getstr (arg1);
9274 p2 = c_getstr (arg2);
9276 if (p1 && p2)
9278 const int i = strcmp (p1, p2);
9279 if (i < 0)
9280 return integer_minus_one_node;
9281 else if (i > 0)
9282 return integer_one_node;
9283 else
9284 return integer_zero_node;
9287 /* If the second arg is "", return *(const unsigned char*)arg1. */
9288 if (p2 && *p2 == '\0')
9290 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9291 tree cst_uchar_ptr_node
9292 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9294 return fold_convert (integer_type_node,
9295 build1 (INDIRECT_REF, cst_uchar_node,
9296 fold_convert (cst_uchar_ptr_node,
9297 arg1)));
9300 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9301 if (p1 && *p1 == '\0')
9303 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9304 tree cst_uchar_ptr_node
9305 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9307 tree temp = fold_convert (integer_type_node,
9308 build1 (INDIRECT_REF, cst_uchar_node,
9309 fold_convert (cst_uchar_ptr_node,
9310 arg2)));
9311 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9314 return NULL_TREE;
9317 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9318 Return NULL_TREE if no simplification can be made. */
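/* Illustrative examples (placeholder operands): strncmp (s1, s2, 0) folds
   to 0 while still evaluating both pointer arguments, and strncmp (s, "", n)
   with a constant positive N folds to *(const unsigned char *) s.  */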
9320 static tree
9321 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9323 const char *p1, *p2;
9325 if (!validate_arg (arg1, POINTER_TYPE)
9326 || !validate_arg (arg2, POINTER_TYPE)
9327 || !validate_arg (len, INTEGER_TYPE))
9328 return NULL_TREE;
9330 /* If the LEN parameter is zero, return zero. */
9331 if (integer_zerop (len))
9332 return omit_two_operands (integer_type_node, integer_zero_node,
9333 arg1, arg2);
9335 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9336 if (operand_equal_p (arg1, arg2, 0))
9337 return omit_one_operand (integer_type_node, integer_zero_node, len);
9339 p1 = c_getstr (arg1);
9340 p2 = c_getstr (arg2);
9342 if (host_integerp (len, 1) && p1 && p2)
9344 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9345 if (i > 0)
9346 return integer_one_node;
9347 else if (i < 0)
9348 return integer_minus_one_node;
9349 else
9350 return integer_zero_node;
9353 /* If the second arg is "", and the length is greater than zero,
9354 return *(const unsigned char*)arg1. */
9355 if (p2 && *p2 == '\0'
9356 && TREE_CODE (len) == INTEGER_CST
9357 && tree_int_cst_sgn (len) == 1)
9359 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9360 tree cst_uchar_ptr_node
9361 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9363 return fold_convert (integer_type_node,
9364 build1 (INDIRECT_REF, cst_uchar_node,
9365 fold_convert (cst_uchar_ptr_node,
9366 arg1)));
9369 /* If the first arg is "", and the length is greater than zero,
9370 return -*(const unsigned char*)arg2. */
9371 if (p1 && *p1 == '\0'
9372 && TREE_CODE (len) == INTEGER_CST
9373 && tree_int_cst_sgn (len) == 1)
9375 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9376 tree cst_uchar_ptr_node
9377 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9379 tree temp = fold_convert (integer_type_node,
9380 build1 (INDIRECT_REF, cst_uchar_node,
9381 fold_convert (cst_uchar_ptr_node,
9382 arg2)));
9383 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9386 /* If the LEN parameter is one, return an expression corresponding to
9387 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9388 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9390 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9391 tree cst_uchar_ptr_node
9392 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9394 tree ind1 = fold_convert (integer_type_node,
9395 build1 (INDIRECT_REF, cst_uchar_node,
9396 fold_convert (cst_uchar_ptr_node,
9397 arg1)));
9398 tree ind2 = fold_convert (integer_type_node,
9399 build1 (INDIRECT_REF, cst_uchar_node,
9400 fold_convert (cst_uchar_ptr_node,
9401 arg2)));
9402 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9405 return NULL_TREE;
9408 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9409 ARG. Return NULL_TREE if no simplification can be made. */
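/* Illustrative examples (placeholder operands): signbit (-2.5) folds to 1,
   signbit of a provably nonnegative X folds to 0, and when X's format has
   no signed zeros the call folds to the comparison x < 0.0.  */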
9411 static tree
9412 fold_builtin_signbit (tree arg, tree type)
9414 tree temp;
9416 if (!validate_arg (arg, REAL_TYPE))
9417 return NULL_TREE;
9419 /* If ARG is a compile-time constant, determine the result. */
9420 if (TREE_CODE (arg) == REAL_CST
9421 && !TREE_OVERFLOW (arg))
9423 REAL_VALUE_TYPE c;
9425 c = TREE_REAL_CST (arg);
9426 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9427 return fold_convert (type, temp);
9430 /* If ARG is non-negative, the result is always zero. */
9431 if (tree_expr_nonnegative_p (arg))
9432 return omit_one_operand (type, integer_zero_node, arg);
9434 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9435 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9436 return fold_build2 (LT_EXPR, type, arg,
9437 build_real (TREE_TYPE (arg), dconst0));
9439 return NULL_TREE;
9442 /* Fold function call to builtin copysign, copysignf or copysignl with
9443 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9444 be made. */
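/* Illustrative examples (placeholder operands): copysign (x, x) folds to x,
   copysign (-3.0, 1.0) folds to the constant 3.0, and copysign (x, y) with
   a provably nonnegative Y folds to fabs (x) while keeping Y's side
   effects.  */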
9446 static tree
9447 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9449 tree tem;
9451 if (!validate_arg (arg1, REAL_TYPE)
9452 || !validate_arg (arg2, REAL_TYPE))
9453 return NULL_TREE;
9455 /* copysign(X,X) is X. */
9456 if (operand_equal_p (arg1, arg2, 0))
9457 return fold_convert (type, arg1);
9459 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9460 if (TREE_CODE (arg1) == REAL_CST
9461 && TREE_CODE (arg2) == REAL_CST
9462 && !TREE_OVERFLOW (arg1)
9463 && !TREE_OVERFLOW (arg2))
9465 REAL_VALUE_TYPE c1, c2;
9467 c1 = TREE_REAL_CST (arg1);
9468 c2 = TREE_REAL_CST (arg2);
9469 /* c1.sign := c2.sign. */
9470 real_copysign (&c1, &c2);
9471 return build_real (type, c1);
9474 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9475 Remember to evaluate Y for side-effects. */
9476 if (tree_expr_nonnegative_p (arg2))
9477 return omit_one_operand (type,
9478 fold_build1 (ABS_EXPR, type, arg1),
9479 arg2);
9481 /* Strip sign changing operations for the first argument. */
9482 tem = fold_strip_sign_ops (arg1);
9483 if (tem)
9484 return build_call_expr (fndecl, 2, tem, arg2);
9486 return NULL_TREE;
9489 /* Fold a call to builtin isascii with argument ARG. */
9491 static tree
9492 fold_builtin_isascii (tree arg)
9494 if (!validate_arg (arg, INTEGER_TYPE))
9495 return NULL_TREE;
9496 else
9498 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9499 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9500 build_int_cst (NULL_TREE,
9501 ~ (unsigned HOST_WIDE_INT) 0x7f));
9502 return fold_build2 (EQ_EXPR, integer_type_node,
9503 arg, integer_zero_node);
9507 /* Fold a call to builtin toascii with argument ARG. */
9509 static tree
9510 fold_builtin_toascii (tree arg)
9512 if (!validate_arg (arg, INTEGER_TYPE))
9513 return NULL_TREE;
9515 /* Transform toascii(c) -> (c & 0x7f). */
9516 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9517 build_int_cst (NULL_TREE, 0x7f));
9520 /* Fold a call to builtin isdigit with argument ARG. */
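/* Illustrative example (assumes an ASCII target where '0' is 48): the
   transformation below turns isdigit (c) into the branch-free test
   (unsigned) c - 48 <= 9, so no library call is emitted.  */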
9522 static tree
9523 fold_builtin_isdigit (tree arg)
9525 if (!validate_arg (arg, INTEGER_TYPE))
9526 return NULL_TREE;
9527 else
9529 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9530 /* According to the C standard, isdigit is unaffected by locale.
9531 However, it definitely is affected by the target character set. */
9532 unsigned HOST_WIDE_INT target_digit0
9533 = lang_hooks.to_target_charset ('0');
9535 if (target_digit0 == 0)
9536 return NULL_TREE;
9538 arg = fold_convert (unsigned_type_node, arg);
9539 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9540 build_int_cst (unsigned_type_node, target_digit0));
9541 return fold_build2 (LE_EXPR, integer_type_node, arg,
9542 build_int_cst (unsigned_type_node, 9));
9546 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9548 static tree
9549 fold_builtin_fabs (tree arg, tree type)
9551 if (!validate_arg (arg, REAL_TYPE))
9552 return NULL_TREE;
9554 arg = fold_convert (type, arg);
9555 if (TREE_CODE (arg) == REAL_CST)
9556 return fold_abs_const (arg, type);
9557 return fold_build1 (ABS_EXPR, type, arg);
9560 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9562 static tree
9563 fold_builtin_abs (tree arg, tree type)
9565 if (!validate_arg (arg, INTEGER_TYPE))
9566 return NULL_TREE;
9568 arg = fold_convert (type, arg);
9569 if (TREE_CODE (arg) == INTEGER_CST)
9570 return fold_abs_const (arg, type);
9571 return fold_build1 (ABS_EXPR, type, arg);
9574 /* Fold a call to builtin fmin or fmax. */
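/* Illustrative examples (placeholder operands): fmin (x, x) folds to x,
   fmin (x, NaN) with a quiet constant NaN folds to x, and under
   -ffinite-math-only fmin/fmax become MIN_EXPR/MAX_EXPR directly.  */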
9576 static tree
9577 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9579 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9581 /* Calculate the result when the argument is a constant. */
9582 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9584 if (res)
9585 return res;
9587 /* If either argument is NaN, return the other one. Avoid the
9588 transformation if we get (and honor) a signalling NaN. Using
9589 omit_one_operand() ensures we create a non-lvalue. */
9590 if (TREE_CODE (arg0) == REAL_CST
9591 && real_isnan (&TREE_REAL_CST (arg0))
9592 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9593 || ! TREE_REAL_CST (arg0).signalling))
9594 return omit_one_operand (type, arg1, arg0);
9595 if (TREE_CODE (arg1) == REAL_CST
9596 && real_isnan (&TREE_REAL_CST (arg1))
9597 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9598 || ! TREE_REAL_CST (arg1).signalling))
9599 return omit_one_operand (type, arg0, arg1);
9601 /* Transform fmin/fmax(x,x) -> x. */
9602 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9603 return omit_one_operand (type, arg0, arg1);
9605 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9606 functions to return the numeric arg if the other one is NaN.
9607 These tree codes don't honor that, so only transform if
9608 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9609 handled, so we don't have to worry about it either. */
9610 if (flag_finite_math_only)
9611 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9612 fold_convert (type, arg0),
9613 fold_convert (type, arg1));
9615 return NULL_TREE;
9618 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9620 static tree
9621 fold_builtin_carg (tree arg, tree type)
9623 if (validate_arg (arg, COMPLEX_TYPE))
9625 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9627 if (atan2_fn)
9629 tree new_arg = builtin_save_expr (arg);
9630 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9631 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9632 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9636 return NULL_TREE;
9639 /* Fold a call to builtin logb/ilogb. */
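/* Worked example (added for clarity): 24.0 is stored with a normalized
   significand of 0.75 and REAL_EXP of 5, so the code below folds
   logb (24.0) to 4 converted to the return type, matching
   24.0 == 1.5 * 2**4.  */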
9641 static tree
9642 fold_builtin_logb (tree arg, tree rettype)
9644 if (! validate_arg (arg, REAL_TYPE))
9645 return NULL_TREE;
9647 STRIP_NOPS (arg);
9649 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9651 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9653 switch (value->cl)
9655 case rvc_nan:
9656 case rvc_inf:
9657 /* If arg is Inf or NaN and we're logb, return it. */
9658 if (TREE_CODE (rettype) == REAL_TYPE)
9659 return fold_convert (rettype, arg);
9660 /* Fall through... */
9661 case rvc_zero:
9662 /* Zero may set errno and/or raise an exception for logb; also,
9663 for ilogb we don't know the value of FP_ILOGB0. */
9664 return NULL_TREE;
9665 case rvc_normal:
9666 /* For normal numbers, proceed iff radix == 2. In GCC,
9667 normalized significands are in the range [0.5, 1.0). We
9668 want the exponent as if they were [1.0, 2.0) so get the
9669 exponent and subtract 1. */
9670 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9671 return fold_convert (rettype, build_int_cst (NULL_TREE,
9672 REAL_EXP (value)-1));
9673 break;
9677 return NULL_TREE;
9680 /* Fold a call to builtin significand, if radix == 2. */
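/* Worked example (added for clarity): 24.0 is stored as 0.75 * 2**5;
   forcing the exponent to 1 gives 0.75 * 2**1, so significand (24.0)
   folds to the constant 1.5.  */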
9682 static tree
9683 fold_builtin_significand (tree arg, tree rettype)
9685 if (! validate_arg (arg, REAL_TYPE))
9686 return NULL_TREE;
9688 STRIP_NOPS (arg);
9690 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9692 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9694 switch (value->cl)
9696 case rvc_zero:
9697 case rvc_nan:
9698 case rvc_inf:
9699 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9700 return fold_convert (rettype, arg);
9701 case rvc_normal:
9702 /* For normal numbers, proceed iff radix == 2. */
9703 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9705 REAL_VALUE_TYPE result = *value;
9706 /* In GCC, normalized significands are in the range [0.5,
9707 1.0). We want them to be [1.0, 2.0) so set the
9708 exponent to 1. */
9709 SET_REAL_EXP (&result, 1);
9710 return build_real (rettype, result);
9712 break;
9716 return NULL_TREE;
9719 /* Fold a call to builtin frexp. We can assume the base is 2. */
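/* Worked example (placeholder E): frexp (24.0, &e) folds to the value 0.75
   with *e set to 5, since 24.0 == 0.75 * 2**5 and GCC already keeps
   normalized significands in [0.5, 1.0).  */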
9721 static tree
9722 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9724 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9725 return NULL_TREE;
9727 STRIP_NOPS (arg0);
9729 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9730 return NULL_TREE;
9732 arg1 = build_fold_indirect_ref (arg1);
9734 /* Proceed if a valid pointer type was passed in. */
9735 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9737 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9738 tree frac, exp;
9740 switch (value->cl)
9742 case rvc_zero:
9743 /* For +-0, return (*exp = 0, +-0). */
9744 exp = integer_zero_node;
9745 frac = arg0;
9746 break;
9747 case rvc_nan:
9748 case rvc_inf:
9749 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9750 return omit_one_operand (rettype, arg0, arg1);
9751 case rvc_normal:
9753 /* Since the frexp function always expects base 2, and in
9754 GCC normalized significands are already in the range
9755 [0.5, 1.0), we have exactly what frexp wants. */
9756 REAL_VALUE_TYPE frac_rvt = *value;
9757 SET_REAL_EXP (&frac_rvt, 0);
9758 frac = build_real (rettype, frac_rvt);
9759 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9761 break;
9762 default:
9763 gcc_unreachable ();
9766 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9767 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9768 TREE_SIDE_EFFECTS (arg1) = 1;
9769 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9772 return NULL_TREE;
9775 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9776 then we can assume the base is two. If it's false, then we have to
9777 check the mode of the TYPE parameter in certain cases. */
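/* Illustrative examples (placeholder operands): ldexp (x, 0) and
   ldexp (0.0, n) fold to their first argument, and constant operands such
   as ldexp (0.75, 5) are evaluated at compile time to 24.0 provided the
   result fits the target type.  */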
9779 static tree
9780 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9782 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9784 STRIP_NOPS (arg0);
9785 STRIP_NOPS (arg1);
9787 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9788 if (real_zerop (arg0) || integer_zerop (arg1)
9789 || (TREE_CODE (arg0) == REAL_CST
9790 && !real_isfinite (&TREE_REAL_CST (arg0))))
9791 return omit_one_operand (type, arg0, arg1);
9793 /* If both arguments are constant, then try to evaluate it. */
9794 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9795 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9796 && host_integerp (arg1, 0))
9798 /* Bound the maximum adjustment to twice the range of the
9799 mode's valid exponents. Use abs to ensure the range is
9800 positive as a sanity check. */
9801 const long max_exp_adj = 2 *
9802 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9803 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9805 /* Get the user-requested adjustment. */
9806 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9808 /* The requested adjustment must be inside this range. This
9809 is a preliminary cap to avoid things like overflow; we
9810 may still fail to compute the result for other reasons. */
9811 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9813 REAL_VALUE_TYPE initial_result;
9815 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9817 /* Ensure we didn't overflow. */
9818 if (! real_isinf (&initial_result))
9820 const REAL_VALUE_TYPE trunc_result
9821 = real_value_truncate (TYPE_MODE (type), initial_result);
9823 /* Only proceed if the target mode can hold the
9824 resulting value. */
9825 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9826 return build_real (type, trunc_result);
9832 return NULL_TREE;
9835 /* Fold a call to builtin modf. */
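/* Worked examples (placeholder I): modf (2.5, &i) folds to 0.5 with *i set
   to 2.0, and modf (-3.0, &i) folds to -0.0 with *i set to -3.0, keeping
   the sign for an already-integral negative argument.  */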
9837 static tree
9838 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9840 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9841 return NULL_TREE;
9843 STRIP_NOPS (arg0);
9845 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9846 return NULL_TREE;
9848 arg1 = build_fold_indirect_ref (arg1);
9850 /* Proceed if a valid pointer type was passed in. */
9851 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9853 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9854 REAL_VALUE_TYPE trunc, frac;
9856 switch (value->cl)
9858 case rvc_nan:
9859 case rvc_zero:
9860 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9861 trunc = frac = *value;
9862 break;
9863 case rvc_inf:
9864 /* For +-Inf, return (*arg1 = arg0, +-0). */
9865 frac = dconst0;
9866 frac.sign = value->sign;
9867 trunc = *value;
9868 break;
9869 case rvc_normal:
9870 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9871 real_trunc (&trunc, VOIDmode, value);
9872 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9873 /* If the original number was negative and already
9874 integral, then the fractional part is -0.0. */
9875 if (value->sign && frac.cl == rvc_zero)
9876 frac.sign = value->sign;
9877 break;
9880 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9881 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9882 build_real (rettype, trunc));
9883 TREE_SIDE_EFFECTS (arg1) = 1;
9884 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9885 build_real (rettype, frac));
9888 return NULL_TREE;
9891 /* Fold a call to __builtin_isnan, __builtin_isinf, __builtin_isinf_sign,
9892 __builtin_isfinite or __builtin_finite. ARG is the argument for the call. */
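/* Illustrative examples (placeholder X): isinf applied to a real constant
   folds to 1 for +Inf, -1 for -Inf and 0 otherwise, while a non-constant
   isnan (x) becomes the self-comparison UNORDERED_EXPR (x, x).  */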
9894 static tree
9895 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9897 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9898 REAL_VALUE_TYPE r;
9900 if (!validate_arg (arg, REAL_TYPE))
9901 return NULL_TREE;
9903 switch (builtin_index)
9905 case BUILT_IN_ISINF:
9906 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9907 return omit_one_operand (type, integer_zero_node, arg);
9909 if (TREE_CODE (arg) == REAL_CST)
9911 r = TREE_REAL_CST (arg);
9912 if (real_isinf (&r))
9913 return real_compare (GT_EXPR, &r, &dconst0)
9914 ? integer_one_node : integer_minus_one_node;
9915 else
9916 return integer_zero_node;
9919 return NULL_TREE;
9921 case BUILT_IN_ISINF_SIGN:
9923 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9924 /* In a boolean context, GCC will fold the inner COND_EXPR to
9925 1. So e.g. "if (isinf_sign(x))" would be folded to just
9926 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9927 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9928 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9929 tree tmp = NULL_TREE;
9931 arg = builtin_save_expr (arg);
9933 if (signbit_fn && isinf_fn)
9935 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9936 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9938 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9939 signbit_call, integer_zero_node);
9940 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9941 isinf_call, integer_zero_node);
9943 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9944 integer_minus_one_node, integer_one_node);
9945 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9946 integer_zero_node);
9949 return tmp;
9952 case BUILT_IN_ISFINITE:
9953 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9954 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9955 return omit_one_operand (type, integer_one_node, arg);
9957 if (TREE_CODE (arg) == REAL_CST)
9959 r = TREE_REAL_CST (arg);
9960 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9963 return NULL_TREE;
9965 case BUILT_IN_ISNAN:
9966 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9967 return omit_one_operand (type, integer_zero_node, arg);
9969 if (TREE_CODE (arg) == REAL_CST)
9971 r = TREE_REAL_CST (arg);
9972 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9975 arg = builtin_save_expr (arg);
9976 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9978 default:
9979 gcc_unreachable ();
9983 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9984 This builtin will generate code to return the appropriate floating
9985 point classification depending on the value of the floating point
9986 number passed in. The possible return values must be supplied as
9987 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9988 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9989 one floating-point argument, which is "type generic". */
9991 static tree
9992 fold_builtin_fpclassify (tree exp)
9994 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9995 arg, type, res, tmp;
9996 enum machine_mode mode;
9997 REAL_VALUE_TYPE r;
9998 char buf[128];
10000 /* Verify the required arguments in the original call. */
10001 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10002 INTEGER_TYPE, INTEGER_TYPE,
10003 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10004 return NULL_TREE;
10006 fp_nan = CALL_EXPR_ARG (exp, 0);
10007 fp_infinite = CALL_EXPR_ARG (exp, 1);
10008 fp_normal = CALL_EXPR_ARG (exp, 2);
10009 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10010 fp_zero = CALL_EXPR_ARG (exp, 4);
10011 arg = CALL_EXPR_ARG (exp, 5);
10012 type = TREE_TYPE (arg);
10013 mode = TYPE_MODE (type);
10014 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
10016 /* fpclassify(x) ->
10017 isnan(x) ? FP_NAN :
10018 (fabs(x) == Inf ? FP_INFINITE :
10019 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10020 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10022 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10023 build_real (type, dconst0));
10024 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
10026 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10027 real_from_string (&r, buf);
10028 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
10029 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
10031 if (HONOR_INFINITIES (mode))
10033 real_inf (&r);
10034 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10035 build_real (type, r));
10036 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
10039 if (HONOR_NANS (mode))
10041 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
10042 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
10045 return res;
10048 /* Fold a call to an unordered comparison function such as
10049 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10050 being called and ARG0 and ARG1 are the arguments for the call.
10051 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10052 the opposite of the desired result. UNORDERED_CODE is used
10053 for modes that can hold NaNs and ORDERED_CODE is used for
10054 the rest. */
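/* Illustrative example (placeholder operands): isgreater (x, y) is folded
   below to the negation of UNLE_EXPR (x, y) when the operands' mode honors
   NaNs, to !(x <= y) otherwise, and isunordered on NaN-free operands folds
   to 0.  */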
10056 static tree
10057 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
10058 enum tree_code unordered_code,
10059 enum tree_code ordered_code)
10061 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10062 enum tree_code code;
10063 tree type0, type1;
10064 enum tree_code code0, code1;
10065 tree cmp_type = NULL_TREE;
10067 type0 = TREE_TYPE (arg0);
10068 type1 = TREE_TYPE (arg1);
10070 code0 = TREE_CODE (type0);
10071 code1 = TREE_CODE (type1);
10073 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10074 /* Choose the wider of two real types. */
10075 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10076 ? type0 : type1;
10077 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10078 cmp_type = type0;
10079 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10080 cmp_type = type1;
10082 arg0 = fold_convert (cmp_type, arg0);
10083 arg1 = fold_convert (cmp_type, arg1);
10085 if (unordered_code == UNORDERED_EXPR)
10087 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10088 return omit_two_operands (type, integer_zero_node, arg0, arg1);
10089 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
10092 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10093 : ordered_code;
10094 return fold_build1 (TRUTH_NOT_EXPR, type,
10095 fold_build2 (code, type, arg0, arg1));
10098 /* Fold a call to built-in function FNDECL with 0 arguments.
10099 IGNORE is true if the result of the function call is ignored. This
10100 function returns NULL_TREE if no simplification was possible. */
10102 static tree
10103 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10105 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10106 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10107 switch (fcode)
10109 CASE_FLT_FN (BUILT_IN_INF):
10110 case BUILT_IN_INFD32:
10111 case BUILT_IN_INFD64:
10112 case BUILT_IN_INFD128:
10113 return fold_builtin_inf (type, true);
10115 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10116 return fold_builtin_inf (type, false);
10118 case BUILT_IN_CLASSIFY_TYPE:
10119 return fold_builtin_classify_type (NULL_TREE);
10121 default:
10122 break;
10124 return NULL_TREE;
10127 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10128 IGNORE is true if the result of the function call is ignored. This
10129 function returns NULL_TREE if no simplification was possible. */
10131 static tree
10132 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
10134 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10135 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10136 switch (fcode)
10139 case BUILT_IN_CONSTANT_P:
10141 tree val = fold_builtin_constant_p (arg0);
10143 /* Gimplification will pull the CALL_EXPR for the builtin out of
10144 an if condition. When not optimizing, we'll not CSE it back.
10145 To avoid regressions such as link errors, return false now. */
10146 if (!val && !optimize)
10147 val = integer_zero_node;
10149 return val;
10152 case BUILT_IN_CLASSIFY_TYPE:
10153 return fold_builtin_classify_type (arg0);
10155 case BUILT_IN_STRLEN:
10156 return fold_builtin_strlen (arg0);
10158 CASE_FLT_FN (BUILT_IN_FABS):
10159 return fold_builtin_fabs (arg0, type);
10161 case BUILT_IN_ABS:
10162 case BUILT_IN_LABS:
10163 case BUILT_IN_LLABS:
10164 case BUILT_IN_IMAXABS:
10165 return fold_builtin_abs (arg0, type);
10167 CASE_FLT_FN (BUILT_IN_CONJ):
10168 if (validate_arg (arg0, COMPLEX_TYPE))
10169 return fold_build1 (CONJ_EXPR, type, arg0);
10170 break;
10172 CASE_FLT_FN (BUILT_IN_CREAL):
10173 if (validate_arg (arg0, COMPLEX_TYPE))
10174 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
10175 break;
10177 CASE_FLT_FN (BUILT_IN_CIMAG):
10178 if (validate_arg (arg0, COMPLEX_TYPE))
10179 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
10180 break;
10182 CASE_FLT_FN (BUILT_IN_CCOS):
10183 CASE_FLT_FN (BUILT_IN_CCOSH):
10184 /* These functions are "even", i.e. f(x) == f(-x). */
10185 if (validate_arg (arg0, COMPLEX_TYPE))
10187 tree narg = fold_strip_sign_ops (arg0);
10188 if (narg)
10189 return build_call_expr (fndecl, 1, narg);
10191 break;
10193 CASE_FLT_FN (BUILT_IN_CABS):
10194 return fold_builtin_cabs (arg0, type, fndecl);
10196 CASE_FLT_FN (BUILT_IN_CARG):
10197 return fold_builtin_carg (arg0, type);
10199 CASE_FLT_FN (BUILT_IN_SQRT):
10200 return fold_builtin_sqrt (arg0, type);
10202 CASE_FLT_FN (BUILT_IN_CBRT):
10203 return fold_builtin_cbrt (arg0, type);
10205 CASE_FLT_FN (BUILT_IN_ASIN):
10206 if (validate_arg (arg0, REAL_TYPE))
10207 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10208 &dconstm1, &dconst1, true);
10209 break;
10211 CASE_FLT_FN (BUILT_IN_ACOS):
10212 if (validate_arg (arg0, REAL_TYPE))
10213 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10214 &dconstm1, &dconst1, true);
10215 break;
10217 CASE_FLT_FN (BUILT_IN_ATAN):
10218 if (validate_arg (arg0, REAL_TYPE))
10219 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10220 break;
10222 CASE_FLT_FN (BUILT_IN_ASINH):
10223 if (validate_arg (arg0, REAL_TYPE))
10224 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10225 break;
10227 CASE_FLT_FN (BUILT_IN_ACOSH):
10228 if (validate_arg (arg0, REAL_TYPE))
10229 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10230 &dconst1, NULL, true);
10231 break;
10233 CASE_FLT_FN (BUILT_IN_ATANH):
10234 if (validate_arg (arg0, REAL_TYPE))
10235 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10236 &dconstm1, &dconst1, false);
10237 break;
10239 CASE_FLT_FN (BUILT_IN_SIN):
10240 if (validate_arg (arg0, REAL_TYPE))
10241 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10242 break;
10244 CASE_FLT_FN (BUILT_IN_COS):
10245 return fold_builtin_cos (arg0, type, fndecl);
10248 CASE_FLT_FN (BUILT_IN_TAN):
10249 return fold_builtin_tan (arg0, type);
10251 CASE_FLT_FN (BUILT_IN_CEXP):
10252 return fold_builtin_cexp (arg0, type);
10254 CASE_FLT_FN (BUILT_IN_CEXPI):
10255 if (validate_arg (arg0, REAL_TYPE))
10256 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10257 break;
10259 CASE_FLT_FN (BUILT_IN_SINH):
10260 if (validate_arg (arg0, REAL_TYPE))
10261 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10262 break;
10264 CASE_FLT_FN (BUILT_IN_COSH):
10265 return fold_builtin_cosh (arg0, type, fndecl);
10267 CASE_FLT_FN (BUILT_IN_TANH):
10268 if (validate_arg (arg0, REAL_TYPE))
10269 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10270 break;
10272 CASE_FLT_FN (BUILT_IN_ERF):
10273 if (validate_arg (arg0, REAL_TYPE))
10274 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10275 break;
10277 CASE_FLT_FN (BUILT_IN_ERFC):
10278 if (validate_arg (arg0, REAL_TYPE))
10279 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10280 break;
10282 CASE_FLT_FN (BUILT_IN_TGAMMA):
10283 if (validate_arg (arg0, REAL_TYPE))
10284 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10285 break;
10287 CASE_FLT_FN (BUILT_IN_EXP):
10288 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10290 CASE_FLT_FN (BUILT_IN_EXP2):
10291 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10293 CASE_FLT_FN (BUILT_IN_EXP10):
10294 CASE_FLT_FN (BUILT_IN_POW10):
10295 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10297 CASE_FLT_FN (BUILT_IN_EXPM1):
10298 if (validate_arg (arg0, REAL_TYPE))
10299 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10300 break;
10302 CASE_FLT_FN (BUILT_IN_LOG):
10303 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10305 CASE_FLT_FN (BUILT_IN_LOG2):
10306 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10308 CASE_FLT_FN (BUILT_IN_LOG10):
10309 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10311 CASE_FLT_FN (BUILT_IN_LOG1P):
10312 if (validate_arg (arg0, REAL_TYPE))
10313 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10314 &dconstm1, NULL, false);
10315 break;
10317 CASE_FLT_FN (BUILT_IN_J0):
10318 if (validate_arg (arg0, REAL_TYPE))
10319 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10320 NULL, NULL, 0);
10321 break;
10323 CASE_FLT_FN (BUILT_IN_J1):
10324 if (validate_arg (arg0, REAL_TYPE))
10325 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10326 NULL, NULL, 0);
10327 break;
10329 CASE_FLT_FN (BUILT_IN_Y0):
10330 if (validate_arg (arg0, REAL_TYPE))
10331 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10332 &dconst0, NULL, false);
10333 break;
10335 CASE_FLT_FN (BUILT_IN_Y1):
10336 if (validate_arg (arg0, REAL_TYPE))
10337 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10338 &dconst0, NULL, false);
10339 break;
10341 CASE_FLT_FN (BUILT_IN_NAN):
10342 case BUILT_IN_NAND32:
10343 case BUILT_IN_NAND64:
10344 case BUILT_IN_NAND128:
10345 return fold_builtin_nan (arg0, type, true);
10347 CASE_FLT_FN (BUILT_IN_NANS):
10348 return fold_builtin_nan (arg0, type, false);
10350 CASE_FLT_FN (BUILT_IN_FLOOR):
10351 return fold_builtin_floor (fndecl, arg0);
10353 CASE_FLT_FN (BUILT_IN_CEIL):
10354 return fold_builtin_ceil (fndecl, arg0);
10356 CASE_FLT_FN (BUILT_IN_TRUNC):
10357 return fold_builtin_trunc (fndecl, arg0);
10359 CASE_FLT_FN (BUILT_IN_ROUND):
10360 return fold_builtin_round (fndecl, arg0);
10362 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10363 CASE_FLT_FN (BUILT_IN_RINT):
10364 return fold_trunc_transparent_mathfn (fndecl, arg0);
10366 CASE_FLT_FN (BUILT_IN_LCEIL):
10367 CASE_FLT_FN (BUILT_IN_LLCEIL):
10368 CASE_FLT_FN (BUILT_IN_LFLOOR):
10369 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10370 CASE_FLT_FN (BUILT_IN_LROUND):
10371 CASE_FLT_FN (BUILT_IN_LLROUND):
10372 return fold_builtin_int_roundingfn (fndecl, arg0);
10374 CASE_FLT_FN (BUILT_IN_LRINT):
10375 CASE_FLT_FN (BUILT_IN_LLRINT):
10376 return fold_fixed_mathfn (fndecl, arg0);
10378 case BUILT_IN_BSWAP32:
10379 case BUILT_IN_BSWAP64:
10380 return fold_builtin_bswap (fndecl, arg0);
10382 CASE_INT_FN (BUILT_IN_FFS):
10383 CASE_INT_FN (BUILT_IN_CLZ):
10384 CASE_INT_FN (BUILT_IN_CTZ):
10385 CASE_INT_FN (BUILT_IN_POPCOUNT):
10386 CASE_INT_FN (BUILT_IN_PARITY):
10387 return fold_builtin_bitop (fndecl, arg0);
10389 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10390 return fold_builtin_signbit (arg0, type);
10392 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10393 return fold_builtin_significand (arg0, type);
10395 CASE_FLT_FN (BUILT_IN_ILOGB):
10396 CASE_FLT_FN (BUILT_IN_LOGB):
10397 return fold_builtin_logb (arg0, type);
10399 case BUILT_IN_ISASCII:
10400 return fold_builtin_isascii (arg0);
10402 case BUILT_IN_TOASCII:
10403 return fold_builtin_toascii (arg0);
10405 case BUILT_IN_ISDIGIT:
10406 return fold_builtin_isdigit (arg0);
10408 CASE_FLT_FN (BUILT_IN_FINITE):
10409 case BUILT_IN_FINITED32:
10410 case BUILT_IN_FINITED64:
10411 case BUILT_IN_FINITED128:
10412 case BUILT_IN_ISFINITE:
10413 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10415 CASE_FLT_FN (BUILT_IN_ISINF):
10416 case BUILT_IN_ISINFD32:
10417 case BUILT_IN_ISINFD64:
10418 case BUILT_IN_ISINFD128:
10419 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10421 case BUILT_IN_ISINF_SIGN:
10422 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10424 CASE_FLT_FN (BUILT_IN_ISNAN):
10425 case BUILT_IN_ISNAND32:
10426 case BUILT_IN_ISNAND64:
10427 case BUILT_IN_ISNAND128:
10428 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10430 case BUILT_IN_PRINTF:
10431 case BUILT_IN_PRINTF_UNLOCKED:
10432 case BUILT_IN_VPRINTF:
10433 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10435 default:
10436 break;
10439 return NULL_TREE;
10443 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10444 IGNORE is true if the result of the function call is ignored. This
10445 function returns NULL_TREE if no simplification was possible. */
10447 static tree
10448 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10450 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10451 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10453 switch (fcode)
10455 CASE_FLT_FN (BUILT_IN_JN):
10456 if (validate_arg (arg0, INTEGER_TYPE)
10457 && validate_arg (arg1, REAL_TYPE))
10458 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10459 break;
10461 CASE_FLT_FN (BUILT_IN_YN):
10462 if (validate_arg (arg0, INTEGER_TYPE)
10463 && validate_arg (arg1, REAL_TYPE))
10464 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10465 &dconst0, false);
10466 break;
10468 CASE_FLT_FN (BUILT_IN_DREM):
10469 CASE_FLT_FN (BUILT_IN_REMAINDER):
10470 if (validate_arg (arg0, REAL_TYPE)
10471 && validate_arg (arg1, REAL_TYPE))
10472 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10473 break;
10475 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10476 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10477 if (validate_arg (arg0, REAL_TYPE)
10478 && validate_arg (arg1, POINTER_TYPE))
10479 return do_mpfr_lgamma_r (arg0, arg1, type);
10480 break;
10482 CASE_FLT_FN (BUILT_IN_ATAN2):
10483 if (validate_arg (arg0, REAL_TYPE)
10484 && validate_arg (arg1, REAL_TYPE))
10485 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10486 break;
10488 CASE_FLT_FN (BUILT_IN_FDIM):
10489 if (validate_arg (arg0, REAL_TYPE)
10490 && validate_arg (arg1, REAL_TYPE))
10491 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10492 break;
10494 CASE_FLT_FN (BUILT_IN_HYPOT):
10495 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10497 CASE_FLT_FN (BUILT_IN_LDEXP):
10498 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10499 CASE_FLT_FN (BUILT_IN_SCALBN):
10500 CASE_FLT_FN (BUILT_IN_SCALBLN):
10501 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10503 CASE_FLT_FN (BUILT_IN_FREXP):
10504 return fold_builtin_frexp (arg0, arg1, type);
10506 CASE_FLT_FN (BUILT_IN_MODF):
10507 return fold_builtin_modf (arg0, arg1, type);
10509 case BUILT_IN_BZERO:
10510 return fold_builtin_bzero (arg0, arg1, ignore);
10512 case BUILT_IN_FPUTS:
10513 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10515 case BUILT_IN_FPUTS_UNLOCKED:
10516 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10518 case BUILT_IN_STRSTR:
10519 return fold_builtin_strstr (arg0, arg1, type);
10521 case BUILT_IN_STRCAT:
10522 return fold_builtin_strcat (arg0, arg1);
10524 case BUILT_IN_STRSPN:
10525 return fold_builtin_strspn (arg0, arg1);
10527 case BUILT_IN_STRCSPN:
10528 return fold_builtin_strcspn (arg0, arg1);
10530 case BUILT_IN_STRCHR:
10531 case BUILT_IN_INDEX:
10532 return fold_builtin_strchr (arg0, arg1, type);
10534 case BUILT_IN_STRRCHR:
10535 case BUILT_IN_RINDEX:
10536 return fold_builtin_strrchr (arg0, arg1, type);
10538 case BUILT_IN_STRCPY:
10539 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10541 case BUILT_IN_STPCPY:
10542 if (ignore)
10544 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10545 if (!fn)
10546 break;
10548 return build_call_expr (fn, 2, arg0, arg1);
10550 break;
10552 case BUILT_IN_STRCMP:
10553 return fold_builtin_strcmp (arg0, arg1);
10555 case BUILT_IN_STRPBRK:
10556 return fold_builtin_strpbrk (arg0, arg1, type);
10558 case BUILT_IN_EXPECT:
10559 return fold_builtin_expect (arg0, arg1);
10561 CASE_FLT_FN (BUILT_IN_POW):
10562 return fold_builtin_pow (fndecl, arg0, arg1, type);
10564 CASE_FLT_FN (BUILT_IN_POWI):
10565 return fold_builtin_powi (fndecl, arg0, arg1, type);
10567 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10568 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10570 CASE_FLT_FN (BUILT_IN_FMIN):
10571 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10573 CASE_FLT_FN (BUILT_IN_FMAX):
10574 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10576 case BUILT_IN_ISGREATER:
10577 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10578 case BUILT_IN_ISGREATEREQUAL:
10579 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10580 case BUILT_IN_ISLESS:
10581 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10582 case BUILT_IN_ISLESSEQUAL:
10583 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10584 case BUILT_IN_ISLESSGREATER:
10585 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10586 case BUILT_IN_ISUNORDERED:
10587 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10588 NOP_EXPR);
10590 /* We do the folding for va_start in the expander. */
10591 case BUILT_IN_VA_START:
10592 break;
10594 case BUILT_IN_SPRINTF:
10595 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10597 case BUILT_IN_OBJECT_SIZE:
10598 return fold_builtin_object_size (arg0, arg1);
10600 case BUILT_IN_PRINTF:
10601 case BUILT_IN_PRINTF_UNLOCKED:
10602 case BUILT_IN_VPRINTF:
10603 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10605 case BUILT_IN_PRINTF_CHK:
10606 case BUILT_IN_VPRINTF_CHK:
10607 if (!validate_arg (arg0, INTEGER_TYPE)
10608 || TREE_SIDE_EFFECTS (arg0))
10609 return NULL_TREE;
10610 else
10611 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10612 break;
10614 case BUILT_IN_FPRINTF:
10615 case BUILT_IN_FPRINTF_UNLOCKED:
10616 case BUILT_IN_VFPRINTF:
10617 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10618 ignore, fcode);
10620 default:
10621 break;
10623 return NULL_TREE;
10626 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10627 and ARG2. IGNORE is true if the result of the function call is ignored.
10628 This function returns NULL_TREE if no simplification was possible. */
10630 static tree
10631 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10633 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10634 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10635 switch (fcode)
10638 CASE_FLT_FN (BUILT_IN_SINCOS):
10639 return fold_builtin_sincos (arg0, arg1, arg2);
10641 CASE_FLT_FN (BUILT_IN_FMA):
10642 if (validate_arg (arg0, REAL_TYPE)
10643 && validate_arg (arg1, REAL_TYPE)
10644 && validate_arg (arg2, REAL_TYPE))
10645 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10646 break;
10648 CASE_FLT_FN (BUILT_IN_REMQUO):
10649 if (validate_arg (arg0, REAL_TYPE)
10650 && validate_arg (arg1, REAL_TYPE)
10651 && validate_arg (arg2, POINTER_TYPE))
10652 return do_mpfr_remquo (arg0, arg1, arg2);
10653 break;
10655 case BUILT_IN_MEMSET:
10656 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10658 case BUILT_IN_BCOPY:
10659 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10661 case BUILT_IN_MEMCPY:
10662 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10664 case BUILT_IN_MEMPCPY:
10665 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10667 case BUILT_IN_MEMMOVE:
10668 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10670 case BUILT_IN_STRNCAT:
10671 return fold_builtin_strncat (arg0, arg1, arg2);
10673 case BUILT_IN_STRNCPY:
10674 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10676 case BUILT_IN_STRNCMP:
10677 return fold_builtin_strncmp (arg0, arg1, arg2);
10679 case BUILT_IN_MEMCHR:
10680 return fold_builtin_memchr (arg0, arg1, arg2, type);
10682 case BUILT_IN_BCMP:
10683 case BUILT_IN_MEMCMP:
10684 return fold_builtin_memcmp (arg0, arg1, arg2);
10686 case BUILT_IN_SPRINTF:
10687 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10689 case BUILT_IN_STRCPY_CHK:
10690 case BUILT_IN_STPCPY_CHK:
10691 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10692 ignore, fcode);
10694 case BUILT_IN_STRCAT_CHK:
10695 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10697 case BUILT_IN_PRINTF_CHK:
10698 case BUILT_IN_VPRINTF_CHK:
10699 if (!validate_arg (arg0, INTEGER_TYPE)
10700 || TREE_SIDE_EFFECTS (arg0))
10701 return NULL_TREE;
10702 else
10703 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10704 break;
10706 case BUILT_IN_FPRINTF:
10707 case BUILT_IN_FPRINTF_UNLOCKED:
10708 case BUILT_IN_VFPRINTF:
10709 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10711 case BUILT_IN_FPRINTF_CHK:
10712 case BUILT_IN_VFPRINTF_CHK:
10713 if (!validate_arg (arg1, INTEGER_TYPE)
10714 || TREE_SIDE_EFFECTS (arg1))
10715 return NULL_TREE;
10716 else
10717 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10718 ignore, fcode);
10720 default:
10721 break;
10723 return NULL_TREE;
10726 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10727 ARG2, and ARG3. IGNORE is true if the result of the function call is
10728 ignored. This function returns NULL_TREE if no simplification was
10729 possible. */
10731 static tree
10732 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10733 bool ignore)
10735 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10737 switch (fcode)
10739 case BUILT_IN_MEMCPY_CHK:
10740 case BUILT_IN_MEMPCPY_CHK:
10741 case BUILT_IN_MEMMOVE_CHK:
10742 case BUILT_IN_MEMSET_CHK:
10743 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10744 NULL_TREE, ignore,
10745 DECL_FUNCTION_CODE (fndecl));
10747 case BUILT_IN_STRNCPY_CHK:
10748 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10750 case BUILT_IN_STRNCAT_CHK:
10751 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10753 case BUILT_IN_FPRINTF_CHK:
10754 case BUILT_IN_VFPRINTF_CHK:
10755 if (!validate_arg (arg1, INTEGER_TYPE)
10756 || TREE_SIDE_EFFECTS (arg1))
10757 return NULL_TREE;
10758 else
10759 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10760 ignore, fcode);
10761 break;
10763 default:
10764 break;
10766 return NULL_TREE;
10769 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10770 arguments, where NARGS <= 4. IGNORE is true if the result of the
10771 function call is ignored. This function returns NULL_TREE if no
10772 simplification was possible. Note that this only folds builtins with
10773 fixed argument patterns. Foldings that do varargs-to-varargs
10774 transformations, or that match calls with more than 4 arguments,
10775 need to be handled with fold_builtin_varargs instead. */
10777 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10779 static tree
10780 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10782 tree ret = NULL_TREE;
10784 switch (nargs)
10786 case 0:
10787 ret = fold_builtin_0 (fndecl, ignore);
10788 break;
10789 case 1:
10790 ret = fold_builtin_1 (fndecl, args[0], ignore);
10791 break;
10792 case 2:
10793 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10794 break;
10795 case 3:
10796 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10797 break;
10798 case 4:
10799 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10800 ignore);
10801 break;
10802 default:
10803 break;
10805 if (ret)
10807 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10808 TREE_NO_WARNING (ret) = 1;
10809 return ret;
10811 return NULL_TREE;
10814 /* Builtins with folding operations that operate on "..." arguments
10815 need special handling; we need to store the arguments in a convenient
10816 data structure before attempting any folding. Fortunately there are
10817 only a few builtins that fall into this category. FNDECL is the
10818 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10819 result of the function call is ignored. */
10821 static tree
10822 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10824 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10825 tree ret = NULL_TREE;
10827 switch (fcode)
10829 case BUILT_IN_SPRINTF_CHK:
10830 case BUILT_IN_VSPRINTF_CHK:
10831 ret = fold_builtin_sprintf_chk (exp, fcode);
10832 break;
10834 case BUILT_IN_SNPRINTF_CHK:
10835 case BUILT_IN_VSNPRINTF_CHK:
10836 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10837 break;
10839 case BUILT_IN_FPCLASSIFY:
10840 ret = fold_builtin_fpclassify (exp);
10841 break;
10843 default:
10844 break;
10846 if (ret)
10848 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10849 TREE_NO_WARNING (ret) = 1;
10850 return ret;
10852 return NULL_TREE;
10855 /* Return true if FNDECL shouldn't be folded right now.
10856 If a built-in function has an always_inline attribute wrapper,
10857 defer folding it until after always_inline functions have been
10858 inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10859 might not be performed. */
10861 static bool
10862 avoid_folding_inline_builtin (tree fndecl)
10864 return (DECL_DECLARED_INLINE_P (fndecl)
10865 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10866 && cfun
10867 && !cfun->always_inline_functions_inlined
10868 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10871 /* A wrapper function for builtin folding that prevents warnings for
10872 "statement without effect" and the like, caused by removing the
10873 call node earlier than the warning is generated. */
10875 tree
10876 fold_call_expr (tree exp, bool ignore)
10878 tree ret = NULL_TREE;
10879 tree fndecl = get_callee_fndecl (exp);
10880 if (fndecl
10881 && TREE_CODE (fndecl) == FUNCTION_DECL
10882 && DECL_BUILT_IN (fndecl)
10883 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10884 yet. Defer folding until we see all the arguments
10885 (after inlining). */
10886 && !CALL_EXPR_VA_ARG_PACK (exp))
10888 int nargs = call_expr_nargs (exp);
10890 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10891 instead the last argument is __builtin_va_arg_pack (). Defer folding
10892 even in that case, until the arguments are finalized. */
10893 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10895 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10896 if (fndecl2
10897 && TREE_CODE (fndecl2) == FUNCTION_DECL
10898 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10899 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10900 return NULL_TREE;
10903 if (avoid_folding_inline_builtin (fndecl))
10904 return NULL_TREE;
10906 /* FIXME: Don't use a list in this interface. */
10907 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10908 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10909 else
10911 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10913 tree *args = CALL_EXPR_ARGP (exp);
10914 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10916 if (!ret)
10917 ret = fold_builtin_varargs (fndecl, exp, ignore);
10918 if (ret)
10920 /* Propagate location information from original call to
10921 expansion of builtin. Otherwise things like
10922 maybe_emit_chk_warning, that operate on the expansion
10923 of a builtin, will use the wrong location information. */
10924 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10926 tree realret = ret;
10927 if (TREE_CODE (ret) == NOP_EXPR)
10928 realret = TREE_OPERAND (ret, 0);
10929 if (CAN_HAVE_LOCATION_P (realret)
10930 && !EXPR_HAS_LOCATION (realret))
10931 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10933 return ret;
10937 return NULL_TREE;
10940 /* Conveniently construct a function call expression. FNDECL names the
10941 function to be called and ARGLIST is a TREE_LIST of arguments. */
10943 tree
10944 build_function_call_expr (tree fndecl, tree arglist)
10946 tree fntype = TREE_TYPE (fndecl);
10947 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10948 int n = list_length (arglist);
10949 tree *argarray = (tree *) alloca (n * sizeof (tree));
10950 int i;
10952 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10953 argarray[i] = TREE_VALUE (arglist);
10954 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10957 /* Conveniently construct a function call expression. FNDECL names the
10958 function to be called, N is the number of arguments, and the "..."
10959 parameters are the argument expressions. */
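/* Usage example, as seen in fold_builtin_strcpy above:
   build_call_expr (fn, 3, dest, src, len) builds and folds a call to FN
   with the three given argument trees.  */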
10961 tree
10962 build_call_expr (tree fndecl, int n, ...)
10964 va_list ap;
10965 tree fntype = TREE_TYPE (fndecl);
10966 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10967 tree *argarray = (tree *) alloca (n * sizeof (tree));
10968 int i;
10970 va_start (ap, n);
10971 for (i = 0; i < n; i++)
10972 argarray[i] = va_arg (ap, tree);
10973 va_end (ap);
10974 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10977 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10978 N arguments are passed in the array ARGARRAY. */
10980 tree
10981 fold_builtin_call_array (tree type,
10982 tree fn,
10983 int n,
10984 tree *argarray)
10986 tree ret = NULL_TREE;
10987 int i;
10988 tree exp;
10990 if (TREE_CODE (fn) == ADDR_EXPR)
10992 tree fndecl = TREE_OPERAND (fn, 0);
10993 if (TREE_CODE (fndecl) == FUNCTION_DECL
10994 && DECL_BUILT_IN (fndecl))
10996 /* If last argument is __builtin_va_arg_pack (), arguments to this
10997 function are not finalized yet. Defer folding until they are. */
10998 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11000 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11001 if (fndecl2
11002 && TREE_CODE (fndecl2) == FUNCTION_DECL
11003 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11004 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11005 return build_call_array (type, fn, n, argarray);
11007 if (avoid_folding_inline_builtin (fndecl))
11008 return build_call_array (type, fn, n, argarray);
11009 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11011 tree arglist = NULL_TREE;
11012 for (i = n - 1; i >= 0; i--)
11013 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
11014 ret = targetm.fold_builtin (fndecl, arglist, false);
11015 if (ret)
11016 return ret;
11017 return build_call_array (type, fn, n, argarray);
11019 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11021 /* First try the transformations that don't require consing up
11022 an exp. */
11023 ret = fold_builtin_n (fndecl, argarray, n, false);
11024 if (ret)
11025 return ret;
11028 /* If we got this far, we need to build an exp. */
11029 exp = build_call_array (type, fn, n, argarray);
11030 ret = fold_builtin_varargs (fndecl, exp, false);
11031 return ret ? ret : exp;
11035 return build_call_array (type, fn, n, argarray);
11038 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11039 along with N new arguments specified as the "..." parameters. SKIP
11040 is the number of arguments in EXP to be omitted. This function is used
11041 to do varargs-to-varargs transformations. */
11043 static tree
11044 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
11046 int oldnargs = call_expr_nargs (exp);
11047 int nargs = oldnargs - skip + n;
11048 tree fntype = TREE_TYPE (fndecl);
11049 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11050 tree *buffer;
11052 if (n > 0)
11054 int i, j;
11055 va_list ap;
11057 buffer = XALLOCAVEC (tree, nargs);
11058 va_start (ap, n);
11059 for (i = 0; i < n; i++)
11060 buffer[i] = va_arg (ap, tree);
11061 va_end (ap);
11062 for (j = skip; j < oldnargs; j++, i++)
11063 buffer[i] = CALL_EXPR_ARG (exp, j);
11065 else
11066 buffer = CALL_EXPR_ARGP (exp) + skip;
11068 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
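/* For example, fold_builtin_sprintf_chk later in this file calls
     rewrite_call_expr (exp, 4, fn, 2, dest, fmt)
   to turn __sprintf_chk (dest, flag, size, fmt, ...) into
   sprintf (dest, fmt, ...): the first four arguments of EXP are dropped,
   DEST and FMT are prepended, and any remaining arguments are kept.  */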
11071 /* Validate a single argument ARG against a tree code CODE representing
11072 a type. */
11074 static bool
11075 validate_arg (const_tree arg, enum tree_code code)
11077 if (!arg)
11078 return false;
11079 else if (code == POINTER_TYPE)
11080 return POINTER_TYPE_P (TREE_TYPE (arg));
11081 else if (code == INTEGER_TYPE)
11082 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11083 return code == TREE_CODE (TREE_TYPE (arg));
11086 /* This function validates the types of a function call argument list
11087 against a specified list of tree_codes. If the last specifier is a 0,
11088 that represents an ellipsis; otherwise the last specifier must be a
11089 VOID_TYPE.
11091 This is the GIMPLE version of validate_arglist. Eventually we want to
11092 completely convert builtins.c to work from GIMPLEs and the tree based
11093 validate_arglist will then be removed. */
11095 bool
11096 validate_gimple_arglist (const_gimple call, ...)
11098 enum tree_code code;
11099 bool res = 0;
11100 va_list ap;
11101 const_tree arg;
11102 size_t i;
11104 va_start (ap, call);
11105 i = 0;
11109 code = va_arg (ap, enum tree_code);
11110 switch (code)
11112 case 0:
11113 /* This signifies an ellipsis; any further arguments are all ok. */
11114 res = true;
11115 goto end;
11116 case VOID_TYPE:
11117 /* This signifies an endlink; if no arguments remain, return
11118 true, otherwise return false. */
11119 res = (i == gimple_call_num_args (call));
11120 goto end;
11121 default:
11122 /* If no parameters remain or the parameter's code does not
11123 match the specified code, return false. Otherwise continue
11124 checking any remaining arguments. */
11125 arg = gimple_call_arg (call, i++);
11126 if (!validate_arg (arg, code))
11127 goto end;
11128 break;
11131 while (1);
11133 /* We need gotos here since we can only have one VA_CLOSE in a
11134 function. */
11135 end: ;
11136 va_end (ap);
11138 return res;
11141 /* This function validates the types of a function call argument list
11142 against a specified list of tree_codes. If the last specifier is a 0,
11143 that represents an ellipsis; otherwise the last specifier must be a
11144 VOID_TYPE. */
11146 bool
11147 validate_arglist (const_tree callexpr, ...)
11149 enum tree_code code;
11150 bool res = 0;
11151 va_list ap;
11152 const_call_expr_arg_iterator iter;
11153 const_tree arg;
11155 va_start (ap, callexpr);
11156 init_const_call_expr_arg_iterator (callexpr, &iter);
11160 code = va_arg (ap, enum tree_code);
11161 switch (code)
11163 case 0:
11164 /* This signifies an ellipsis; any further arguments are all ok. */
11165 res = true;
11166 goto end;
11167 case VOID_TYPE:
11168 /* This signifies an endlink; if no arguments remain, return
11169 true, otherwise return false. */
11170 res = !more_const_call_expr_args_p (&iter);
11171 goto end;
11172 default:
11173 /* If no parameters remain or the parameter's code does not
11174 match the specified code, return false. Otherwise continue
11175 checking any remaining arguments. */
11176 arg = next_const_call_expr_arg (&iter);
11177 if (!validate_arg (arg, code))
11178 goto end;
11179 break;
11182 while (1);
11184 /* We need gotos here since we can only have one VA_CLOSE in a
11185 function. */
11186 end: ;
11187 va_end (ap);
11189 return res;
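/* For example, expand_builtin_object_size later in this file uses
     validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
   to require exactly one pointer argument followed by one integer;
   ending the list with 0 instead of VOID_TYPE would permit additional
   trailing arguments.  */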
11192 /* Default target-specific builtin expander that does nothing. */
11194 rtx
11195 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11196 rtx target ATTRIBUTE_UNUSED,
11197 rtx subtarget ATTRIBUTE_UNUSED,
11198 enum machine_mode mode ATTRIBUTE_UNUSED,
11199 int ignore ATTRIBUTE_UNUSED)
11201 return NULL_RTX;
11204 /* Returns true if EXP represents data that would potentially reside
11205 in a readonly section. */
11207 static bool
11208 readonly_data_expr (tree exp)
11210 STRIP_NOPS (exp);
11212 if (TREE_CODE (exp) != ADDR_EXPR)
11213 return false;
11215 exp = get_base_address (TREE_OPERAND (exp, 0));
11216 if (!exp)
11217 return false;
11219 /* Make sure we call decl_readonly_section only for trees it
11220 can handle (since it returns true for everything it doesn't
11221 understand). */
11222 if (TREE_CODE (exp) == STRING_CST
11223 || TREE_CODE (exp) == CONSTRUCTOR
11224 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11225 return decl_readonly_section (exp, 0);
11226 else
11227 return false;
11230 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11231 to the call, and TYPE is its return type.
11233 Return NULL_TREE if no simplification was possible, otherwise return the
11234 simplified form of the call as a tree.
11236 The simplified form may be a constant or other expression which
11237 computes the same value, but in a more efficient manner (including
11238 calls to other builtin functions).
11240 The call may contain arguments which need to be evaluated, but
11241 which are not useful to determine the result of the call. In
11242 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11243 COMPOUND_EXPR will be an argument which must be evaluated.
11244 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11245 COMPOUND_EXPR in the chain will contain the tree for the simplified
11246 form of the builtin function call. */
11248 static tree
11249 fold_builtin_strstr (tree s1, tree s2, tree type)
11251 if (!validate_arg (s1, POINTER_TYPE)
11252 || !validate_arg (s2, POINTER_TYPE))
11253 return NULL_TREE;
11254 else
11256 tree fn;
11257 const char *p1, *p2;
11259 p2 = c_getstr (s2);
11260 if (p2 == NULL)
11261 return NULL_TREE;
11263 p1 = c_getstr (s1);
11264 if (p1 != NULL)
11266 const char *r = strstr (p1, p2);
11267 tree tem;
11269 if (r == NULL)
11270 return build_int_cst (TREE_TYPE (s1), 0);
11272 /* Return an offset into the constant string argument. */
11273 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11274 s1, size_int (r - p1));
11275 return fold_convert (type, tem);
11278 /* The argument is const char *, and the result is char *, so we need
11279 a type conversion here to avoid a warning. */
11280 if (p2[0] == '\0')
11281 return fold_convert (type, s1);
11283 if (p2[1] != '\0')
11284 return NULL_TREE;
11286 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11287 if (!fn)
11288 return NULL_TREE;
11290 /* New argument list transforming strstr(s1, s2) to
11291 strchr(s1, s2[0]). */
11292 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
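/* For example, with a constant second argument the fold above yields:
     strstr (s, "")          ->  (char *) s
     strstr ("abcde", "cd")  ->  "abcde" + 2
     strstr (s, "c")         ->  strchr (s, 'c')
   Anything else is left for the library call.  */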
11296 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11297 the call, and TYPE is its return type.
11299 Return NULL_TREE if no simplification was possible, otherwise return the
11300 simplified form of the call as a tree.
11302 The simplified form may be a constant or other expression which
11303 computes the same value, but in a more efficient manner (including
11304 calls to other builtin functions).
11306 The call may contain arguments which need to be evaluated, but
11307 which are not useful to determine the result of the call. In
11308 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11309 COMPOUND_EXPR will be an argument which must be evaluated.
11310 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11311 COMPOUND_EXPR in the chain will contain the tree for the simplified
11312 form of the builtin function call. */
11314 static tree
11315 fold_builtin_strchr (tree s1, tree s2, tree type)
11317 if (!validate_arg (s1, POINTER_TYPE)
11318 || !validate_arg (s2, INTEGER_TYPE))
11319 return NULL_TREE;
11320 else
11322 const char *p1;
11324 if (TREE_CODE (s2) != INTEGER_CST)
11325 return NULL_TREE;
11327 p1 = c_getstr (s1);
11328 if (p1 != NULL)
11330 char c;
11331 const char *r;
11332 tree tem;
11334 if (target_char_cast (s2, &c))
11335 return NULL_TREE;
11337 r = strchr (p1, c);
11339 if (r == NULL)
11340 return build_int_cst (TREE_TYPE (s1), 0);
11342 /* Return an offset into the constant string argument. */
11343 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11344 s1, size_int (r - p1));
11345 return fold_convert (type, tem);
11347 return NULL_TREE;
11351 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11352 the call, and TYPE is its return type.
11354 Return NULL_TREE if no simplification was possible, otherwise return the
11355 simplified form of the call as a tree.
11357 The simplified form may be a constant or other expression which
11358 computes the same value, but in a more efficient manner (including
11359 calls to other builtin functions).
11361 The call may contain arguments which need to be evaluated, but
11362 which are not useful to determine the result of the call. In
11363 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11364 COMPOUND_EXPR will be an argument which must be evaluated.
11365 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11366 COMPOUND_EXPR in the chain will contain the tree for the simplified
11367 form of the builtin function call. */
11369 static tree
11370 fold_builtin_strrchr (tree s1, tree s2, tree type)
11372 if (!validate_arg (s1, POINTER_TYPE)
11373 || !validate_arg (s2, INTEGER_TYPE))
11374 return NULL_TREE;
11375 else
11377 tree fn;
11378 const char *p1;
11380 if (TREE_CODE (s2) != INTEGER_CST)
11381 return NULL_TREE;
11383 p1 = c_getstr (s1);
11384 if (p1 != NULL)
11386 char c;
11387 const char *r;
11388 tree tem;
11390 if (target_char_cast (s2, &c))
11391 return NULL_TREE;
11393 r = strrchr (p1, c);
11395 if (r == NULL)
11396 return build_int_cst (TREE_TYPE (s1), 0);
11398 /* Return an offset into the constant string argument. */
11399 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11400 s1, size_int (r - p1));
11401 return fold_convert (type, tem);
11404 if (! integer_zerop (s2))
11405 return NULL_TREE;
11407 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11408 if (!fn)
11409 return NULL_TREE;
11411 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11412 return build_call_expr (fn, 2, s1, s2);
11416 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11417 to the call, and TYPE is its return type.
11419 Return NULL_TREE if no simplification was possible, otherwise return the
11420 simplified form of the call as a tree.
11422 The simplified form may be a constant or other expression which
11423 computes the same value, but in a more efficient manner (including
11424 calls to other builtin functions).
11426 The call may contain arguments which need to be evaluated, but
11427 which are not useful to determine the result of the call. In
11428 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11429 COMPOUND_EXPR will be an argument which must be evaluated.
11430 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11431 COMPOUND_EXPR in the chain will contain the tree for the simplified
11432 form of the builtin function call. */
11434 static tree
11435 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11437 if (!validate_arg (s1, POINTER_TYPE)
11438 || !validate_arg (s2, POINTER_TYPE))
11439 return NULL_TREE;
11440 else
11442 tree fn;
11443 const char *p1, *p2;
11445 p2 = c_getstr (s2);
11446 if (p2 == NULL)
11447 return NULL_TREE;
11449 p1 = c_getstr (s1);
11450 if (p1 != NULL)
11452 const char *r = strpbrk (p1, p2);
11453 tree tem;
11455 if (r == NULL)
11456 return build_int_cst (TREE_TYPE (s1), 0);
11458 /* Return an offset into the constant string argument. */
11459 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11460 s1, size_int (r - p1));
11461 return fold_convert (type, tem);
11464 if (p2[0] == '\0')
11465 /* strpbrk(x, "") == NULL.
11466 Evaluate and ignore s1 in case it had side-effects. */
11467 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11469 if (p2[1] != '\0')
11470 return NULL_TREE; /* Really call strpbrk. */
11472 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11473 if (!fn)
11474 return NULL_TREE;
11476 /* New argument list transforming strpbrk(s1, s2) to
11477 strchr(s1, s2[0]). */
11478 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
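/* For example:
     strpbrk (s, "")          ->  NULL  (S still evaluated for side effects)
     strpbrk ("abcde", "dx")  ->  "abcde" + 3
     strpbrk (s, "c")         ->  strchr (s, 'c')  */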
11482 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11483 to the call.
11485 Return NULL_TREE if no simplification was possible, otherwise return the
11486 simplified form of the call as a tree.
11488 The simplified form may be a constant or other expression which
11489 computes the same value, but in a more efficient manner (including
11490 calls to other builtin functions).
11492 The call may contain arguments which need to be evaluated, but
11493 which are not useful to determine the result of the call. In
11494 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11495 COMPOUND_EXPR will be an argument which must be evaluated.
11496 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11497 COMPOUND_EXPR in the chain will contain the tree for the simplified
11498 form of the builtin function call. */
11500 static tree
11501 fold_builtin_strcat (tree dst, tree src)
11503 if (!validate_arg (dst, POINTER_TYPE)
11504 || !validate_arg (src, POINTER_TYPE))
11505 return NULL_TREE;
11506 else
11508 const char *p = c_getstr (src);
11510 /* If the string length is zero, return the dst parameter. */
11511 if (p && *p == '\0')
11512 return dst;
11514 return NULL_TREE;
11518 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11519 arguments to the call.
11521 Return NULL_TREE if no simplification was possible, otherwise return the
11522 simplified form of the call as a tree.
11524 The simplified form may be a constant or other expression which
11525 computes the same value, but in a more efficient manner (including
11526 calls to other builtin functions).
11528 The call may contain arguments which need to be evaluated, but
11529 which are not useful to determine the result of the call. In
11530 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11531 COMPOUND_EXPR will be an argument which must be evaluated.
11532 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11533 COMPOUND_EXPR in the chain will contain the tree for the simplified
11534 form of the builtin function call. */
11536 static tree
11537 fold_builtin_strncat (tree dst, tree src, tree len)
11539 if (!validate_arg (dst, POINTER_TYPE)
11540 || !validate_arg (src, POINTER_TYPE)
11541 || !validate_arg (len, INTEGER_TYPE))
11542 return NULL_TREE;
11543 else
11545 const char *p = c_getstr (src);
11547 /* If the requested length is zero, or the src parameter string
11548 length is zero, return the dst parameter. */
11549 if (integer_zerop (len) || (p && *p == '\0'))
11550 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11552 /* If the requested len is greater than or equal to the string
11553 length, call strcat. */
11554 if (TREE_CODE (len) == INTEGER_CST && p
11555 && compare_tree_int (len, strlen (p)) >= 0)
11557 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11559 /* If the replacement _DECL isn't initialized, don't do the
11560 transformation. */
11561 if (!fn)
11562 return NULL_TREE;
11564 return build_call_expr (fn, 2, dst, src);
11566 return NULL_TREE;
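/* For example:
     strncat (dst, src, 0)    ->  dst  (SRC and LEN still evaluated)
     strncat (dst, "", n)     ->  dst
     strncat (dst, "abc", 5)  ->  strcat (dst, "abc")  (5 >= strlen ("abc"))  */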
11570 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11571 to the call.
11573 Return NULL_TREE if no simplification was possible, otherwise return the
11574 simplified form of the call as a tree.
11576 The simplified form may be a constant or other expression which
11577 computes the same value, but in a more efficient manner (including
11578 calls to other builtin functions).
11580 The call may contain arguments which need to be evaluated, but
11581 which are not useful to determine the result of the call. In
11582 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11583 COMPOUND_EXPR will be an argument which must be evaluated.
11584 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11585 COMPOUND_EXPR in the chain will contain the tree for the simplified
11586 form of the builtin function call. */
11588 static tree
11589 fold_builtin_strspn (tree s1, tree s2)
11591 if (!validate_arg (s1, POINTER_TYPE)
11592 || !validate_arg (s2, POINTER_TYPE))
11593 return NULL_TREE;
11594 else
11596 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11598 /* If both arguments are constants, evaluate at compile-time. */
11599 if (p1 && p2)
11601 const size_t r = strspn (p1, p2);
11602 return size_int (r);
11605 /* If either argument is "", the result is zero. */
11606 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11607 /* Evaluate and ignore both arguments in case either one has
11608 side-effects. */
11609 return omit_two_operands (size_type_node, size_zero_node,
11610 s1, s2);
11611 return NULL_TREE;
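/* For example:
     strspn ("aabcc", "ab")              ->  3
     strspn (s1, "") or strspn ("", s2)  ->  0  (arguments still evaluated)  */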
11615 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11616 to the call.
11618 Return NULL_TREE if no simplification was possible, otherwise return the
11619 simplified form of the call as a tree.
11621 The simplified form may be a constant or other expression which
11622 computes the same value, but in a more efficient manner (including
11623 calls to other builtin functions).
11625 The call may contain arguments which need to be evaluated, but
11626 which are not useful to determine the result of the call. In
11627 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11628 COMPOUND_EXPR will be an argument which must be evaluated.
11629 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11630 COMPOUND_EXPR in the chain will contain the tree for the simplified
11631 form of the builtin function call. */
11633 static tree
11634 fold_builtin_strcspn (tree s1, tree s2)
11636 if (!validate_arg (s1, POINTER_TYPE)
11637 || !validate_arg (s2, POINTER_TYPE))
11638 return NULL_TREE;
11639 else
11641 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11643 /* If both arguments are constants, evaluate at compile-time. */
11644 if (p1 && p2)
11646 const size_t r = strcspn (p1, p2);
11647 return size_int (r);
11650 /* If the first argument is "", the result is zero. */
11651 if (p1 && *p1 == '\0')
11653 /* Evaluate and ignore argument s2 in case it has
11654 side-effects. */
11655 return omit_one_operand (size_type_node,
11656 size_zero_node, s2);
11659 /* If the second argument is "", return __builtin_strlen(s1). */
11660 if (p2 && *p2 == '\0')
11662 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11664 /* If the replacement _DECL isn't initialized, don't do the
11665 transformation. */
11666 if (!fn)
11667 return NULL_TREE;
11669 return build_call_expr (fn, 1, s1);
11671 return NULL_TREE;
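/* For example:
     strcspn ("abcde", "dx")  ->  3
     strcspn ("", s2)         ->  0  (S2 still evaluated)
     strcspn (s1, "")         ->  strlen (s1)  */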
11675 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11676 to the call. IGNORE is true if the value returned
11677 by the builtin will be ignored. UNLOCKED is true if this is
11678 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11679 the known length of the string. Return NULL_TREE if no simplification
11680 was possible. */
11682 tree
11683 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11685 /* If we're using an unlocked function, assume the other unlocked
11686 functions exist explicitly. */
11687 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11688 : implicit_built_in_decls[BUILT_IN_FPUTC];
11689 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11690 : implicit_built_in_decls[BUILT_IN_FWRITE];
11692 /* If the return value is used, don't do the transformation. */
11693 if (!ignore)
11694 return NULL_TREE;
11696 /* Verify the arguments in the original call. */
11697 if (!validate_arg (arg0, POINTER_TYPE)
11698 || !validate_arg (arg1, POINTER_TYPE))
11699 return NULL_TREE;
11701 if (! len)
11702 len = c_strlen (arg0, 0);
11704 /* Get the length of the string passed to fputs. If the length
11705 can't be determined, punt. */
11706 if (!len
11707 || TREE_CODE (len) != INTEGER_CST)
11708 return NULL_TREE;
11710 switch (compare_tree_int (len, 1))
11712 case -1: /* length is 0, delete the call entirely. */
11713 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11715 case 0: /* length is 1, call fputc. */
11717 const char *p = c_getstr (arg0);
11719 if (p != NULL)
11721 if (fn_fputc)
11722 return build_call_expr (fn_fputc, 2,
11723 build_int_cst (NULL_TREE, p[0]), arg1);
11724 else
11725 return NULL_TREE;
11728 /* FALLTHROUGH */
11729 case 1: /* length is greater than 1, call fwrite. */
11731 /* If optimizing for size, keep fputs. */
11732 if (optimize_function_for_size_p (cfun))
11733 return NULL_TREE;
11734 /* New argument list transforming fputs(string, stream) to
11735 fwrite(string, 1, len, stream). */
11736 if (fn_fwrite)
11737 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11738 else
11739 return NULL_TREE;
11741 default:
11742 gcc_unreachable ();
11744 return NULL_TREE;
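/* For example, when the return value is ignored:
     fputs ("", f)     ->  call removed  (F still evaluated)
     fputs ("x", f)    ->  fputc ('x', f)
     fputs ("abc", f)  ->  fwrite ("abc", 1, 3, f)  (unless optimizing for size)  */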
11747 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11748 produced, false otherwise. This is done so that we don't output the
11749 error or warning more than once. */
11751 bool
11752 fold_builtin_next_arg (tree exp, bool va_start_p)
11754 tree fntype = TREE_TYPE (current_function_decl);
11755 int nargs = call_expr_nargs (exp);
11756 tree arg;
11758 if (TYPE_ARG_TYPES (fntype) == 0
11759 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11760 == void_type_node))
11762 error ("%<va_start%> used in function with fixed args");
11763 return true;
11766 if (va_start_p)
11768 if (va_start_p && (nargs != 2))
11770 error ("wrong number of arguments to function %<va_start%>");
11771 return true;
11773 arg = CALL_EXPR_ARG (exp, 1);
11775 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11776 when we checked the arguments and if needed issued a warning. */
11777 else
11779 if (nargs == 0)
11781 /* Evidently an out of date version of <stdarg.h>; can't validate
11782 va_start's second argument, but can still work as intended. */
11783 warning (0, "%<__builtin_next_arg%> called without an argument");
11784 return true;
11786 else if (nargs > 1)
11788 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11789 return true;
11791 arg = CALL_EXPR_ARG (exp, 0);
11794 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11795 or __builtin_next_arg (0) the first time we see it, after checking
11796 the arguments and if needed issuing a warning. */
11797 if (!integer_zerop (arg))
11799 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11801 /* Strip off all nops for the sake of the comparison. This
11802 is not quite the same as STRIP_NOPS. It does more.
11803 We must also strip off INDIRECT_EXPR for C++ reference
11804 parameters. */
11805 while (CONVERT_EXPR_P (arg)
11806 || TREE_CODE (arg) == INDIRECT_REF)
11807 arg = TREE_OPERAND (arg, 0);
11808 if (arg != last_parm)
11810 /* FIXME: Sometimes the tree optimizers hand us something other
11811 than the last argument even though the user did pass the last
11812 argument. We just warn and carry on, so if the argument really
11813 was wrong we may still generate wrong code because of
11814 it. */
11815 warning (0, "second parameter of %<va_start%> not last named argument");
11818 /* Undefined by C99 7.15.1.4p4 (va_start):
11819 "If the parameter parmN is declared with the register storage
11820 class, with a function or array type, or with a type that is
11821 not compatible with the type that results after application of
11822 the default argument promotions, the behavior is undefined."
11823 */
11824 else if (DECL_REGISTER (arg))
11825 warning (0, "undefined behaviour when second parameter of "
11826 "%<va_start%> is declared with %<register%> storage");
11828 /* We want to verify the second parameter just once before the tree
11829 optimizers are run and then avoid keeping it in the tree,
11830 as otherwise we could warn even for correct code like:
11831 void foo (int i, ...)
11832 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11833 if (va_start_p)
11834 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11835 else
11836 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11838 return false;
11842 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11843 ORIG may be null if this is a 2-argument call. We don't attempt to
11844 simplify calls with more than 3 arguments.
11846 Return NULL_TREE if no simplification was possible, otherwise return the
11847 simplified form of the call as a tree. If IGNORED is true, it means that
11848 the caller does not use the returned value of the function. */
11850 static tree
11851 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11853 tree call, retval;
11854 const char *fmt_str = NULL;
11856 /* Verify the required arguments in the original call. We deal with two
11857 types of sprintf() calls: 'sprintf (str, fmt)' and
11858 'sprintf (dest, "%s", orig)'. */
11859 if (!validate_arg (dest, POINTER_TYPE)
11860 || !validate_arg (fmt, POINTER_TYPE))
11861 return NULL_TREE;
11862 if (orig && !validate_arg (orig, POINTER_TYPE))
11863 return NULL_TREE;
11865 /* Check whether the format is a literal string constant. */
11866 fmt_str = c_getstr (fmt);
11867 if (fmt_str == NULL)
11868 return NULL_TREE;
11870 call = NULL_TREE;
11871 retval = NULL_TREE;
11873 if (!init_target_chars ())
11874 return NULL_TREE;
11876 /* If the format doesn't contain % args or %%, use strcpy. */
11877 if (strchr (fmt_str, target_percent) == NULL)
11879 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11881 if (!fn)
11882 return NULL_TREE;
11884 /* Don't optimize sprintf (buf, "abc", ptr++). */
11885 if (orig)
11886 return NULL_TREE;
11888 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11889 'format' is known to contain no % formats. */
11890 call = build_call_expr (fn, 2, dest, fmt);
11891 if (!ignored)
11892 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11895 /* If the format is "%s", use strcpy if the result isn't used. */
11896 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11898 tree fn;
11899 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11901 if (!fn)
11902 return NULL_TREE;
11904 /* Don't crash on sprintf (str1, "%s"). */
11905 if (!orig)
11906 return NULL_TREE;
11908 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11909 if (!ignored)
11911 retval = c_strlen (orig, 1);
11912 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11913 return NULL_TREE;
11915 call = build_call_expr (fn, 2, dest, orig);
11918 if (call && retval)
11920 retval = fold_convert
11921 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11922 retval);
11923 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11925 else
11926 return call;
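/* For example:
     sprintf (buf, "abc")      ->  strcpy (buf, "abc")  (value 3 if used)
     sprintf (buf, "%s", str)  ->  strcpy (buf, str)    (when the result is
                                   ignored or strlen (str) is a known constant)  */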
11929 /* Expand a call EXP to __builtin_object_size. */
11931 static rtx
11932 expand_builtin_object_size (tree exp)
11934 tree ost;
11935 int object_size_type;
11936 tree fndecl = get_callee_fndecl (exp);
11938 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11940 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11941 exp, fndecl);
11942 expand_builtin_trap ();
11943 return const0_rtx;
11946 ost = CALL_EXPR_ARG (exp, 1);
11947 STRIP_NOPS (ost);
11949 if (TREE_CODE (ost) != INTEGER_CST
11950 || tree_int_cst_sgn (ost) < 0
11951 || compare_tree_int (ost, 3) > 0)
11953 error ("%Klast argument of %D is not integer constant between 0 and 3",
11954 exp, fndecl);
11955 expand_builtin_trap ();
11956 return const0_rtx;
11959 object_size_type = tree_low_cst (ost, 0);
11961 return object_size_type < 2 ? constm1_rtx : const0_rtx;
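/* For example, if the object size was not folded to a constant at the
   tree level, the expansion falls back to the "unknown" answers:
     __builtin_object_size (p, 0) or (p, 1)  ->  (size_t) -1
     __builtin_object_size (p, 2) or (p, 3)  ->  (size_t) 0  */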
11964 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11965 FCODE is the BUILT_IN_* to use.
11966 Return NULL_RTX if we failed; the caller should emit a normal call,
11967 otherwise try to get the result in TARGET, if convenient (and in
11968 mode MODE if that's convenient). */
11970 static rtx
11971 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11972 enum built_in_function fcode)
11974 tree dest, src, len, size;
11976 if (!validate_arglist (exp,
11977 POINTER_TYPE,
11978 fcode == BUILT_IN_MEMSET_CHK
11979 ? INTEGER_TYPE : POINTER_TYPE,
11980 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11981 return NULL_RTX;
11983 dest = CALL_EXPR_ARG (exp, 0);
11984 src = CALL_EXPR_ARG (exp, 1);
11985 len = CALL_EXPR_ARG (exp, 2);
11986 size = CALL_EXPR_ARG (exp, 3);
11988 if (! host_integerp (size, 1))
11989 return NULL_RTX;
11991 if (host_integerp (len, 1) || integer_all_onesp (size))
11993 tree fn;
11995 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11997 warning_at (tree_nonartificial_location (exp),
11998 0, "%Kcall to %D will always overflow destination buffer",
11999 exp, get_callee_fndecl (exp));
12000 return NULL_RTX;
12003 fn = NULL_TREE;
12004 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12005 mem{cpy,pcpy,move,set} is available. */
12006 switch (fcode)
12008 case BUILT_IN_MEMCPY_CHK:
12009 fn = built_in_decls[BUILT_IN_MEMCPY];
12010 break;
12011 case BUILT_IN_MEMPCPY_CHK:
12012 fn = built_in_decls[BUILT_IN_MEMPCPY];
12013 break;
12014 case BUILT_IN_MEMMOVE_CHK:
12015 fn = built_in_decls[BUILT_IN_MEMMOVE];
12016 break;
12017 case BUILT_IN_MEMSET_CHK:
12018 fn = built_in_decls[BUILT_IN_MEMSET];
12019 break;
12020 default:
12021 break;
12024 if (! fn)
12025 return NULL_RTX;
12027 fn = build_call_expr (fn, 3, dest, src, len);
12028 STRIP_TYPE_NOPS (fn);
12029 while (TREE_CODE (fn) == COMPOUND_EXPR)
12031 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12032 EXPAND_NORMAL);
12033 fn = TREE_OPERAND (fn, 1);
12035 if (TREE_CODE (fn) == CALL_EXPR)
12036 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12037 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12039 else if (fcode == BUILT_IN_MEMSET_CHK)
12040 return NULL_RTX;
12041 else
12043 unsigned int dest_align
12044 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12046 /* If DEST is not a pointer type, call the normal function. */
12047 if (dest_align == 0)
12048 return NULL_RTX;
12050 /* If SRC and DEST are the same (and not volatile), do nothing. */
12051 if (operand_equal_p (src, dest, 0))
12053 tree expr;
12055 if (fcode != BUILT_IN_MEMPCPY_CHK)
12057 /* Evaluate and ignore LEN in case it has side-effects. */
12058 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12059 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12062 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12063 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12066 /* __memmove_chk special case. */
12067 if (fcode == BUILT_IN_MEMMOVE_CHK)
12069 unsigned int src_align
12070 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12072 if (src_align == 0)
12073 return NULL_RTX;
12075 /* If src is categorized for a readonly section we can use
12076 normal __memcpy_chk. */
12077 if (readonly_data_expr (src))
12079 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12080 if (!fn)
12081 return NULL_RTX;
12082 fn = build_call_expr (fn, 4, dest, src, len, size);
12083 STRIP_TYPE_NOPS (fn);
12084 while (TREE_CODE (fn) == COMPOUND_EXPR)
12086 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12087 EXPAND_NORMAL);
12088 fn = TREE_OPERAND (fn, 1);
12090 if (TREE_CODE (fn) == CALL_EXPR)
12091 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12092 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12095 return NULL_RTX;
12099 /* Emit warning if a buffer overflow is detected at compile time. */
12101 static void
12102 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12104 int is_strlen = 0;
12105 tree len, size;
12106 location_t loc = tree_nonartificial_location (exp);
12108 switch (fcode)
12110 case BUILT_IN_STRCPY_CHK:
12111 case BUILT_IN_STPCPY_CHK:
12112 /* For __strcat_chk the warning will be emitted only if overflowing
12113 by at least strlen (dest) + 1 bytes. */
12114 case BUILT_IN_STRCAT_CHK:
12115 len = CALL_EXPR_ARG (exp, 1);
12116 size = CALL_EXPR_ARG (exp, 2);
12117 is_strlen = 1;
12118 break;
12119 case BUILT_IN_STRNCAT_CHK:
12120 case BUILT_IN_STRNCPY_CHK:
12121 len = CALL_EXPR_ARG (exp, 2);
12122 size = CALL_EXPR_ARG (exp, 3);
12123 break;
12124 case BUILT_IN_SNPRINTF_CHK:
12125 case BUILT_IN_VSNPRINTF_CHK:
12126 len = CALL_EXPR_ARG (exp, 1);
12127 size = CALL_EXPR_ARG (exp, 3);
12128 break;
12129 default:
12130 gcc_unreachable ();
12133 if (!len || !size)
12134 return;
12136 if (! host_integerp (size, 1) || integer_all_onesp (size))
12137 return;
12139 if (is_strlen)
12141 len = c_strlen (len, 1);
12142 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12143 return;
12145 else if (fcode == BUILT_IN_STRNCAT_CHK)
12147 tree src = CALL_EXPR_ARG (exp, 1);
12148 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12149 return;
12150 src = c_strlen (src, 1);
12151 if (! src || ! host_integerp (src, 1))
12153 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12154 exp, get_callee_fndecl (exp));
12155 return;
12157 else if (tree_int_cst_lt (src, size))
12158 return;
12160 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12161 return;
12163 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12164 exp, get_callee_fndecl (exp));
12167 /* Emit warning if a buffer overflow is detected at compile time
12168 in __sprintf_chk/__vsprintf_chk calls. */
12170 static void
12171 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12173 tree dest, size, len, fmt, flag;
12174 const char *fmt_str;
12175 int nargs = call_expr_nargs (exp);
12177 /* Verify the required arguments in the original call. */
12179 if (nargs < 4)
12180 return;
12181 dest = CALL_EXPR_ARG (exp, 0);
12182 flag = CALL_EXPR_ARG (exp, 1);
12183 size = CALL_EXPR_ARG (exp, 2);
12184 fmt = CALL_EXPR_ARG (exp, 3);
12186 if (! host_integerp (size, 1) || integer_all_onesp (size))
12187 return;
12189 /* Check whether the format is a literal string constant. */
12190 fmt_str = c_getstr (fmt);
12191 if (fmt_str == NULL)
12192 return;
12194 if (!init_target_chars ())
12195 return;
12197 /* If the format doesn't contain % args or %%, we know its size. */
12198 if (strchr (fmt_str, target_percent) == 0)
12199 len = build_int_cstu (size_type_node, strlen (fmt_str));
12200 /* If the format is "%s" and first ... argument is a string literal,
12201 we know it too. */
12202 else if (fcode == BUILT_IN_SPRINTF_CHK
12203 && strcmp (fmt_str, target_percent_s) == 0)
12205 tree arg;
12207 if (nargs < 5)
12208 return;
12209 arg = CALL_EXPR_ARG (exp, 4);
12210 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12211 return;
12213 len = c_strlen (arg, 1);
12214 if (!len || ! host_integerp (len, 1))
12215 return;
12217 else
12218 return;
12220 if (! tree_int_cst_lt (len, size))
12221 warning_at (tree_nonartificial_location (exp),
12222 0, "%Kcall to %D will always overflow destination buffer",
12223 exp, get_callee_fndecl (exp));
12226 /* Emit warning if free is called with the address of a variable. */
12228 static void
12229 maybe_emit_free_warning (tree exp)
12231 tree arg = CALL_EXPR_ARG (exp, 0);
12233 STRIP_NOPS (arg);
12234 if (TREE_CODE (arg) != ADDR_EXPR)
12235 return;
12237 arg = get_base_address (TREE_OPERAND (arg, 0));
12238 if (arg == NULL || INDIRECT_REF_P (arg))
12239 return;
12241 if (SSA_VAR_P (arg))
12242 warning_at (tree_nonartificial_location (exp),
12243 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12244 else
12245 warning_at (tree_nonartificial_location (exp),
12246 0, "%Kattempt to free a non-heap object", exp);
12249 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12250 if possible. */
12252 tree
12253 fold_builtin_object_size (tree ptr, tree ost)
12255 tree ret = NULL_TREE;
12256 int object_size_type;
12258 if (!validate_arg (ptr, POINTER_TYPE)
12259 || !validate_arg (ost, INTEGER_TYPE))
12260 return NULL_TREE;
12262 STRIP_NOPS (ost);
12264 if (TREE_CODE (ost) != INTEGER_CST
12265 || tree_int_cst_sgn (ost) < 0
12266 || compare_tree_int (ost, 3) > 0)
12267 return NULL_TREE;
12269 object_size_type = tree_low_cst (ost, 0);
12271 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12272 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12273 and (size_t) 0 for types 2 and 3. */
12274 if (TREE_SIDE_EFFECTS (ptr))
12275 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12277 if (TREE_CODE (ptr) == ADDR_EXPR)
12278 ret = build_int_cstu (size_type_node,
12279 compute_builtin_object_size (ptr, object_size_type));
12281 else if (TREE_CODE (ptr) == SSA_NAME)
12283 unsigned HOST_WIDE_INT bytes;
12285 /* If object size is not known yet, delay folding until
12286 later. Maybe subsequent passes will help determining
12287 it. */
12288 bytes = compute_builtin_object_size (ptr, object_size_type);
12289 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12290 ? -1 : 0))
12291 ret = build_int_cstu (size_type_node, bytes);
12294 if (ret)
12296 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12297 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12298 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12299 ret = NULL_TREE;
12302 return ret;
12305 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12306 DEST, SRC, LEN, and SIZE are the arguments to the call.
12307 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12308 code of the builtin. If MAXLEN is not NULL, it is maximum length
12309 passed as third argument. */
12311 tree
12312 fold_builtin_memory_chk (tree fndecl,
12313 tree dest, tree src, tree len, tree size,
12314 tree maxlen, bool ignore,
12315 enum built_in_function fcode)
12317 tree fn;
12319 if (!validate_arg (dest, POINTER_TYPE)
12320 || !validate_arg (src,
12321 (fcode == BUILT_IN_MEMSET_CHK
12322 ? INTEGER_TYPE : POINTER_TYPE))
12323 || !validate_arg (len, INTEGER_TYPE)
12324 || !validate_arg (size, INTEGER_TYPE))
12325 return NULL_TREE;
12327 /* If SRC and DEST are the same (and not volatile), return DEST
12328 (resp. DEST+LEN for __mempcpy_chk). */
12329 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12331 if (fcode != BUILT_IN_MEMPCPY_CHK)
12332 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12333 else
12335 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12336 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
12340 if (! host_integerp (size, 1))
12341 return NULL_TREE;
12343 if (! integer_all_onesp (size))
12345 if (! host_integerp (len, 1))
12347 /* If LEN is not constant, try MAXLEN too.
12348 For MAXLEN only allow optimizing into non-_ocs function
12349 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12350 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12352 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12354 /* (void) __mempcpy_chk () can be optimized into
12355 (void) __memcpy_chk (). */
12356 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12357 if (!fn)
12358 return NULL_TREE;
12360 return build_call_expr (fn, 4, dest, src, len, size);
12362 return NULL_TREE;
12365 else
12366 maxlen = len;
12368 if (tree_int_cst_lt (size, maxlen))
12369 return NULL_TREE;
12372 fn = NULL_TREE;
12373 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12374 mem{cpy,pcpy,move,set} is available. */
12375 switch (fcode)
12377 case BUILT_IN_MEMCPY_CHK:
12378 fn = built_in_decls[BUILT_IN_MEMCPY];
12379 break;
12380 case BUILT_IN_MEMPCPY_CHK:
12381 fn = built_in_decls[BUILT_IN_MEMPCPY];
12382 break;
12383 case BUILT_IN_MEMMOVE_CHK:
12384 fn = built_in_decls[BUILT_IN_MEMMOVE];
12385 break;
12386 case BUILT_IN_MEMSET_CHK:
12387 fn = built_in_decls[BUILT_IN_MEMSET];
12388 break;
12389 default:
12390 break;
12393 if (!fn)
12394 return NULL_TREE;
12396 return build_call_expr (fn, 3, dest, src, len);
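/* For example, with constant sizes:
     __memcpy_chk (d, s, 16, 32)  ->  memcpy (d, s, 16)    (16 <= 32)
     __memcpy_chk (d, s, 64, 32)  ->  kept as a _chk call  (certain overflow,
                                      diagnosed when the call is expanded)
   A non-constant SIZE likewise keeps the checking call.  */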
12399 /* Fold a call to the __st[rp]cpy_chk builtin.
12400 DEST, SRC, and SIZE are the arguments to the call.
12401 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12402 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12403 strings passed as second argument. */
12405 tree
12406 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12407 tree maxlen, bool ignore,
12408 enum built_in_function fcode)
12410 tree len, fn;
12412 if (!validate_arg (dest, POINTER_TYPE)
12413 || !validate_arg (src, POINTER_TYPE)
12414 || !validate_arg (size, INTEGER_TYPE))
12415 return NULL_TREE;
12417 /* If SRC and DEST are the same (and not volatile), return DEST. */
12418 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12419 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12421 if (! host_integerp (size, 1))
12422 return NULL_TREE;
12424 if (! integer_all_onesp (size))
12426 len = c_strlen (src, 1);
12427 if (! len || ! host_integerp (len, 1))
12429 /* If LEN is not constant, try MAXLEN too.
12430 For MAXLEN only allow optimizing into non-_ocs function
12431 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12432 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12434 if (fcode == BUILT_IN_STPCPY_CHK)
12436 if (! ignore)
12437 return NULL_TREE;
12439 /* If return value of __stpcpy_chk is ignored,
12440 optimize into __strcpy_chk. */
12441 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12442 if (!fn)
12443 return NULL_TREE;
12445 return build_call_expr (fn, 3, dest, src, size);
12448 if (! len || TREE_SIDE_EFFECTS (len))
12449 return NULL_TREE;
12451 /* If c_strlen returned something, but not a constant,
12452 transform __strcpy_chk into __memcpy_chk. */
12453 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12454 if (!fn)
12455 return NULL_TREE;
12457 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12458 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12459 build_call_expr (fn, 4,
12460 dest, src, len, size));
12463 else
12464 maxlen = len;
12466 if (! tree_int_cst_lt (maxlen, size))
12467 return NULL_TREE;
12470 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12471 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12472 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12473 if (!fn)
12474 return NULL_TREE;
12476 return build_call_expr (fn, 2, dest, src);
12479 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12480 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12481 length passed as third argument. */
12483 tree
12484 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12485 tree maxlen)
12487 tree fn;
12489 if (!validate_arg (dest, POINTER_TYPE)
12490 || !validate_arg (src, POINTER_TYPE)
12491 || !validate_arg (len, INTEGER_TYPE)
12492 || !validate_arg (size, INTEGER_TYPE))
12493 return NULL_TREE;
12495 if (! host_integerp (size, 1))
12496 return NULL_TREE;
12498 if (! integer_all_onesp (size))
12500 if (! host_integerp (len, 1))
12502 /* If LEN is not constant, try MAXLEN too.
12503 For MAXLEN only allow optimizing into non-_ocs function
12504 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12505 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12506 return NULL_TREE;
12508 else
12509 maxlen = len;
12511 if (tree_int_cst_lt (size, maxlen))
12512 return NULL_TREE;
12515 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12516 fn = built_in_decls[BUILT_IN_STRNCPY];
12517 if (!fn)
12518 return NULL_TREE;
12520 return build_call_expr (fn, 3, dest, src, len);
12523 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12524 are the arguments to the call. */
12526 static tree
12527 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12529 tree fn;
12530 const char *p;
12532 if (!validate_arg (dest, POINTER_TYPE)
12533 || !validate_arg (src, POINTER_TYPE)
12534 || !validate_arg (size, INTEGER_TYPE))
12535 return NULL_TREE;
12537 p = c_getstr (src);
12538 /* If the SRC parameter is "", return DEST. */
12539 if (p && *p == '\0')
12540 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12542 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12543 return NULL_TREE;
12545 /* If __builtin_strcat_chk is used, assume strcat is available. */
12546 fn = built_in_decls[BUILT_IN_STRCAT];
12547 if (!fn)
12548 return NULL_TREE;
12550 return build_call_expr (fn, 2, dest, src);
12553 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12554 LEN, and SIZE. */
12556 static tree
12557 fold_builtin_strncat_chk (tree fndecl,
12558 tree dest, tree src, tree len, tree size)
12560 tree fn;
12561 const char *p;
12563 if (!validate_arg (dest, POINTER_TYPE)
12564 || !validate_arg (src, POINTER_TYPE)
12565 || !validate_arg (len, INTEGER_TYPE)
12566 || !validate_arg (size, INTEGER_TYPE))
12567 return NULL_TREE;
12569 p = c_getstr (src);
12570 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12571 if (p && *p == '\0')
12572 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12573 else if (integer_zerop (len))
12574 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12576 if (! host_integerp (size, 1))
12577 return NULL_TREE;
12579 if (! integer_all_onesp (size))
12581 tree src_len = c_strlen (src, 1);
12582 if (src_len
12583 && host_integerp (src_len, 1)
12584 && host_integerp (len, 1)
12585 && ! tree_int_cst_lt (len, src_len))
12587 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12588 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12589 if (!fn)
12590 return NULL_TREE;
12592 return build_call_expr (fn, 3, dest, src, size);
12594 return NULL_TREE;
12597 /* If __builtin_strncat_chk is used, assume strncat is available. */
12598 fn = built_in_decls[BUILT_IN_STRNCAT];
12599 if (!fn)
12600 return NULL_TREE;
12602 return build_call_expr (fn, 3, dest, src, len);
12605 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12606 a normal call should be emitted rather than expanding the function
12607 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12609 static tree
12610 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12612 tree dest, size, len, fn, fmt, flag;
12613 const char *fmt_str;
12614 int nargs = call_expr_nargs (exp);
12616 /* Verify the required arguments in the original call. */
12617 if (nargs < 4)
12618 return NULL_TREE;
12619 dest = CALL_EXPR_ARG (exp, 0);
12620 if (!validate_arg (dest, POINTER_TYPE))
12621 return NULL_TREE;
12622 flag = CALL_EXPR_ARG (exp, 1);
12623 if (!validate_arg (flag, INTEGER_TYPE))
12624 return NULL_TREE;
12625 size = CALL_EXPR_ARG (exp, 2);
12626 if (!validate_arg (size, INTEGER_TYPE))
12627 return NULL_TREE;
12628 fmt = CALL_EXPR_ARG (exp, 3);
12629 if (!validate_arg (fmt, POINTER_TYPE))
12630 return NULL_TREE;
12632 if (! host_integerp (size, 1))
12633 return NULL_TREE;
12635 len = NULL_TREE;
12637 if (!init_target_chars ())
12638 return NULL_TREE;
12640 /* Check whether the format is a literal string constant. */
12641 fmt_str = c_getstr (fmt);
12642 if (fmt_str != NULL)
12644 /* If the format doesn't contain % args or %%, we know the size. */
12645 if (strchr (fmt_str, target_percent) == 0)
12647 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12648 len = build_int_cstu (size_type_node, strlen (fmt_str));
12650 /* If the format is "%s" and first ... argument is a string literal,
12651 we know the size too. */
12652 else if (fcode == BUILT_IN_SPRINTF_CHK
12653 && strcmp (fmt_str, target_percent_s) == 0)
12655 tree arg;
12657 if (nargs == 5)
12659 arg = CALL_EXPR_ARG (exp, 4);
12660 if (validate_arg (arg, POINTER_TYPE))
12662 len = c_strlen (arg, 1);
12663 if (! len || ! host_integerp (len, 1))
12664 len = NULL_TREE;
12670 if (! integer_all_onesp (size))
12672 if (! len || ! tree_int_cst_lt (len, size))
12673 return NULL_TREE;
12676 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12677 or if format doesn't contain % chars or is "%s". */
12678 if (! integer_zerop (flag))
12680 if (fmt_str == NULL)
12681 return NULL_TREE;
12682 if (strchr (fmt_str, target_percent) != NULL
12683 && strcmp (fmt_str, target_percent_s))
12684 return NULL_TREE;
12687 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12688 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12689 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12690 if (!fn)
12691 return NULL_TREE;
12693 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
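/* For example, with FLAG equal to 0:
     __sprintf_chk (buf, 0, os, "hello")    ->  sprintf (buf, "hello")
     __sprintf_chk (buf, 0, os, "%s", str)  ->  sprintf (buf, "%s", str)
   provided the known output length fits in OS, or OS is (size_t) -1.  */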
12696 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
12697 a normal call should be emitted rather than expanding the function
12698 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12699 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12700 passed as second argument. */
12702 tree
12703 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12704 enum built_in_function fcode)
12706 tree dest, size, len, fn, fmt, flag;
12707 const char *fmt_str;
12709 /* Verify the required arguments in the original call. */
12710 if (call_expr_nargs (exp) < 5)
12711 return NULL_TREE;
12712 dest = CALL_EXPR_ARG (exp, 0);
12713 if (!validate_arg (dest, POINTER_TYPE))
12714 return NULL_TREE;
12715 len = CALL_EXPR_ARG (exp, 1);
12716 if (!validate_arg (len, INTEGER_TYPE))
12717 return NULL_TREE;
12718 flag = CALL_EXPR_ARG (exp, 2);
12719 if (!validate_arg (flag, INTEGER_TYPE))
12720 return NULL_TREE;
12721 size = CALL_EXPR_ARG (exp, 3);
12722 if (!validate_arg (size, INTEGER_TYPE))
12723 return NULL_TREE;
12724 fmt = CALL_EXPR_ARG (exp, 4);
12725 if (!validate_arg (fmt, POINTER_TYPE))
12726 return NULL_TREE;
12728 if (! host_integerp (size, 1))
12729 return NULL_TREE;
12731 if (! integer_all_onesp (size))
12733 if (! host_integerp (len, 1))
12735 /* If LEN is not constant, try MAXLEN too.
12736 For MAXLEN only allow optimizing into non-_ocs function
12737 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12738 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12739 return NULL_TREE;
12741 else
12742 maxlen = len;
12744 if (tree_int_cst_lt (size, maxlen))
12745 return NULL_TREE;
12748 if (!init_target_chars ())
12749 return NULL_TREE;
12751 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12752 or if format doesn't contain % chars or is "%s". */
12753 if (! integer_zerop (flag))
12755 fmt_str = c_getstr (fmt);
12756 if (fmt_str == NULL)
12757 return NULL_TREE;
12758 if (strchr (fmt_str, target_percent) != NULL
12759 && strcmp (fmt_str, target_percent_s))
12760 return NULL_TREE;
12763 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12764 available. */
12765 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12766 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12767 if (!fn)
12768 return NULL_TREE;
12770 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12773 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12774 FMT and ARG are the arguments to the call; we don't fold cases with
12775 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12777 Return NULL_TREE if no simplification was possible, otherwise return the
12778 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12779 code of the function to be simplified. */
12781 static tree
12782 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12783 enum built_in_function fcode)
12785 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12786 const char *fmt_str = NULL;
12788 /* If the return value is used, don't do the transformation. */
12789 if (! ignore)
12790 return NULL_TREE;
12792 /* Verify the required arguments in the original call. */
12793 if (!validate_arg (fmt, POINTER_TYPE))
12794 return NULL_TREE;
12796 /* Check whether the format is a literal string constant. */
12797 fmt_str = c_getstr (fmt);
12798 if (fmt_str == NULL)
12799 return NULL_TREE;
12801 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12803 /* If we're using an unlocked function, assume the other
12804 unlocked functions exist explicitly. */
12805 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12806 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12808 else
12810 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12811 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12814 if (!init_target_chars ())
12815 return NULL_TREE;
12817 if (strcmp (fmt_str, target_percent_s) == 0
12818 || strchr (fmt_str, target_percent) == NULL)
12820 const char *str;
12822 if (strcmp (fmt_str, target_percent_s) == 0)
12824 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12825 return NULL_TREE;
12827 if (!arg || !validate_arg (arg, POINTER_TYPE))
12828 return NULL_TREE;
12830 str = c_getstr (arg);
12831 if (str == NULL)
12832 return NULL_TREE;
12834 else
12836 /* The format specifier doesn't contain any '%' characters. */
12837 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12838 && arg)
12839 return NULL_TREE;
12840 str = fmt_str;
12843 /* If the string was "", printf does nothing. */
12844 if (str[0] == '\0')
12845 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12847 /* If the string has length of 1, call putchar. */
12848 if (str[1] == '\0')
12850 /* Given printf("c"), (where c is any one character,)
12851 convert "c"[0] to an int and pass that to the replacement
12852 function. */
12853 newarg = build_int_cst (NULL_TREE, str[0]);
12854 if (fn_putchar)
12855 call = build_call_expr (fn_putchar, 1, newarg);
12857 else
12859 /* If the string was "string\n", call puts("string"). */
12860 size_t len = strlen (str);
12861 if ((unsigned char)str[len - 1] == target_newline)
12863 /* Create a NUL-terminated string that's one char shorter
12864 than the original, stripping off the trailing '\n'. */
12865 char *newstr = XALLOCAVEC (char, len);
12866 memcpy (newstr, str, len - 1);
12867 newstr[len - 1] = 0;
12869 newarg = build_string_literal (len, newstr);
12870 if (fn_puts)
12871 call = build_call_expr (fn_puts, 1, newarg);
12873 else
12874 /* We'd like to arrange to call fputs(string,stdout) here,
12875 but we need stdout and don't have a way to get it yet. */
12876 return NULL_TREE;
12880 /* The other optimizations can be done only on the non-va_list variants. */
12881 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12882 return NULL_TREE;
12884 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12885 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12887 if (!arg || !validate_arg (arg, POINTER_TYPE))
12888 return NULL_TREE;
12889 if (fn_puts)
12890 call = build_call_expr (fn_puts, 1, arg);
12893 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12894 else if (strcmp (fmt_str, target_percent_c) == 0)
12896 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12897 return NULL_TREE;
12898 if (fn_putchar)
12899 call = build_call_expr (fn_putchar, 1, arg);
12902 if (!call)
12903 return NULL_TREE;
12905 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
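/* For example, when the result is unused:
     printf ("")          ->  call removed
     printf ("x")         ->  putchar ('x')
     printf ("hello\n")   ->  puts ("hello")
     printf ("%s\n", s)   ->  puts (s)
     printf ("%c", c)     ->  putchar (c)  */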
12908 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12909 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12910 more than 3 arguments, and ARG may be null in the 2-argument case.
12912 Return NULL_TREE if no simplification was possible, otherwise return the
12913 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12914 code of the function to be simplified. */
12916 static tree
12917 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12918 enum built_in_function fcode)
12920 tree fn_fputc, fn_fputs, call = NULL_TREE;
12921 const char *fmt_str = NULL;
12923 /* If the return value is used, don't do the transformation. */
12924 if (! ignore)
12925 return NULL_TREE;
12927 /* Verify the required arguments in the original call. */
12928 if (!validate_arg (fp, POINTER_TYPE))
12929 return NULL_TREE;
12930 if (!validate_arg (fmt, POINTER_TYPE))
12931 return NULL_TREE;
12933 /* Check whether the format is a literal string constant. */
12934 fmt_str = c_getstr (fmt);
12935 if (fmt_str == NULL)
12936 return NULL_TREE;
12938 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12940 /* If we're using an unlocked function, assume the other
12941 unlocked functions exist explicitly. */
12942 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12943 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12945 else
12947 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12948 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12951 if (!init_target_chars ())
12952 return NULL_TREE;
12954 /* If the format doesn't contain % args or %%, use fputs. */
12955 if (strchr (fmt_str, target_percent) == NULL)
12957 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12958 && arg)
12959 return NULL_TREE;
12961 /* If the format specifier was "", fprintf does nothing. */
12962 if (fmt_str[0] == '\0')
12964 /* If FP has side-effects, just wait until gimplification is
12965 done. */
12966 if (TREE_SIDE_EFFECTS (fp))
12967 return NULL_TREE;
12969 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12972 /* When "string" doesn't contain %, replace all cases of
12973 fprintf (fp, string) with fputs (string, fp). The fputs
12974 builtin will take care of special cases like length == 1. */
12975 if (fn_fputs)
12976 call = build_call_expr (fn_fputs, 2, fmt, fp);
12979 /* The other optimizations can be done only on the non-va_list variants. */
12980 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12981 return NULL_TREE;
12983 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12984 else if (strcmp (fmt_str, target_percent_s) == 0)
12986 if (!arg || !validate_arg (arg, POINTER_TYPE))
12987 return NULL_TREE;
12988 if (fn_fputs)
12989 call = build_call_expr (fn_fputs, 2, arg, fp);
12992 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12993 else if (strcmp (fmt_str, target_percent_c) == 0)
12995 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12996 return NULL_TREE;
12997 if (fn_fputc)
12998 call = build_call_expr (fn_fputc, 2, arg, fp);
13001 if (!call)
13002 return NULL_TREE;
13003 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
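/* Illustrative examples of the transformations above (a sketch; each
   requires the checks above to pass and fputs/fputc to be available):
       fprintf (fp, "")        ->  0 (when FP has no side effects),
       fprintf (fp, "hello")   ->  fputs ("hello", fp),
       fprintf (fp, "%s", s)   ->  fputs (s, fp),
       fprintf (fp, "%c", c)   ->  fputc (c, fp).  */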
13006 /* Initialize format string characters in the target charset. */
13008 static bool
13009 init_target_chars (void)
13011 static bool init;
13012 if (!init)
13014 target_newline = lang_hooks.to_target_charset ('\n');
13015 target_percent = lang_hooks.to_target_charset ('%');
13016 target_c = lang_hooks.to_target_charset ('c');
13017 target_s = lang_hooks.to_target_charset ('s');
13018 if (target_newline == 0 || target_percent == 0 || target_c == 0
13019 || target_s == 0)
13020 return false;
13022 target_percent_c[0] = target_percent;
13023 target_percent_c[1] = target_c;
13024 target_percent_c[2] = '\0';
13026 target_percent_s[0] = target_percent;
13027 target_percent_s[1] = target_s;
13028 target_percent_s[2] = '\0';
13030 target_percent_s_newline[0] = target_percent;
13031 target_percent_s_newline[1] = target_s;
13032 target_percent_s_newline[2] = target_newline;
13033 target_percent_s_newline[3] = '\0';
13035 init = true;
13037 return true;
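/* After a successful call, target_percent_c, target_percent_s and
   target_percent_s_newline hold "%c", "%s" and "%s\n" encoded in the
   target execution character set, so the format-string comparisons in
   the folders above and below work even when the target character set
   differs from the host's.  */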
13040 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13041 and no overflow/underflow occurred. INEXACT is true if M was not
13042 exactly calculated. TYPE is the tree type for the result. This
13043 function assumes that you cleared the MPFR flags before
13044 calculating M, so that any flag set on entry was raised by that
13045 calculation. Return NULL_TREE if any checks fail. */
13047 static tree
13048 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13050 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13051 overflow/underflow occurred. If -frounding-math, proceed iff the
13052 result of calling FUNC was exact. */
13053 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13054 && (!flag_rounding_math || !inexact))
13056 REAL_VALUE_TYPE rr;
13058 real_from_mpfr (&rr, m, type, GMP_RNDN);
13059 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
13060 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13061 but the mpfr_t is not, then we underflowed in the
13062 conversion. */
13063 if (real_isfinite (&rr)
13064 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13066 REAL_VALUE_TYPE rmode;
13068 real_convert (&rmode, TYPE_MODE (type), &rr);
13069 /* Proceed iff the specified mode can hold the value. */
13070 if (real_identical (&rmode, &rr))
13071 return build_real (type, rmode);
13074 return NULL_TREE;
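/* The callers below all follow the same pattern (shown here only as a
   sketch):

       mpfr_clear_flags ();
       inexact = mpfr_<func> (m, ..., rnd);
       result = do_mpfr_ckconv (m, type, inexact);

   so any overflow/underflow flag observed here was raised by that
   single MPFR evaluation.  */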
13077 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13078 FUNC on it and return the resulting value as a tree with type TYPE.
13079 If MIN and/or MAX are not NULL, then the supplied ARG must be
13080 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13081 acceptable values, otherwise they are not. The mpfr precision is
13082 set to the precision of TYPE. We assume that function FUNC returns
13083 zero if the result could be calculated exactly within the requested
13084 precision. */
13086 static tree
13087 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13088 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13089 bool inclusive)
13091 tree result = NULL_TREE;
13093 STRIP_NOPS (arg);
13095 /* To proceed, MPFR must exactly represent the target floating point
13096 format, which only happens when the target base equals two. */
13097 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13098 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13100 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13102 if (real_isfinite (ra)
13103 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13104 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13106 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13107 const int prec = fmt->p;
13108 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13109 int inexact;
13110 mpfr_t m;
13112 mpfr_init2 (m, prec);
13113 mpfr_from_real (m, ra, GMP_RNDN);
13114 mpfr_clear_flags ();
13115 inexact = func (m, m, rnd);
13116 result = do_mpfr_ckconv (m, type, inexact);
13117 mpfr_clear (m);
13121 return result;
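/* A sketch of how callers use this helper (the exact call sites live
   elsewhere in this file and may differ in detail): folding sin (x)
   for constant x would be roughly
       do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, 0);
   while acos (x), defined only on [-1, 1], would pass bounds, roughly
       do_mpfr_arg1 (arg, type, mpfr_acos, &dconstm1, &dconst1, true);
   using GCC's REAL_VALUE_TYPE constants for -1 and 1.  */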
13124 /* If arguments ARG1 and ARG2 are REAL_CSTs, call the two-argument mpfr
13125 function FUNC on them and return the resulting value as a tree with type TYPE.
13126 The mpfr precision is set to the precision of TYPE. We assume that
13127 function FUNC returns zero if the result could be calculated
13128 exactly within the requested precision. */
13130 static tree
13131 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13132 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13134 tree result = NULL_TREE;
13136 STRIP_NOPS (arg1);
13137 STRIP_NOPS (arg2);
13139 /* To proceed, MPFR must exactly represent the target floating point
13140 format, which only happens when the target base equals two. */
13141 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13142 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13143 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13145 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13146 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13148 if (real_isfinite (ra1) && real_isfinite (ra2))
13150 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13151 const int prec = fmt->p;
13152 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13153 int inexact;
13154 mpfr_t m1, m2;
13156 mpfr_inits2 (prec, m1, m2, NULL);
13157 mpfr_from_real (m1, ra1, GMP_RNDN);
13158 mpfr_from_real (m2, ra2, GMP_RNDN);
13159 mpfr_clear_flags ();
13160 inexact = func (m1, m1, m2, rnd);
13161 result = do_mpfr_ckconv (m1, type, inexact);
13162 mpfr_clears (m1, m2, NULL);
13166 return result;
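/* A sketch of typical uses (call sites elsewhere in this file may
   differ in detail): two-argument math builtins such as pow and atan2
   can be folded for constant operands with, e.g.,
       do_mpfr_arg2 (arg0, arg1, type, mpfr_pow);
       do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);  */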
13169 /* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the three-argument
13170 mpfr function FUNC on them and return the resulting value as a tree with type TYPE.
13171 The mpfr precision is set to the precision of TYPE. We assume that
13172 function FUNC returns zero if the result could be calculated
13173 exactly within the requested precision. */
13175 static tree
13176 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13177 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13179 tree result = NULL_TREE;
13181 STRIP_NOPS (arg1);
13182 STRIP_NOPS (arg2);
13183 STRIP_NOPS (arg3);
13185 /* To proceed, MPFR must exactly represent the target floating point
13186 format, which only happens when the target base equals two. */
13187 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13188 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13189 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13190 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13192 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13193 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13194 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13196 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13198 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13199 const int prec = fmt->p;
13200 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13201 int inexact;
13202 mpfr_t m1, m2, m3;
13204 mpfr_inits2 (prec, m1, m2, m3, NULL);
13205 mpfr_from_real (m1, ra1, GMP_RNDN);
13206 mpfr_from_real (m2, ra2, GMP_RNDN);
13207 mpfr_from_real (m3, ra3, GMP_RNDN);
13208 mpfr_clear_flags ();
13209 inexact = func (m1, m1, m2, m3, rnd);
13210 result = do_mpfr_ckconv (m1, type, inexact);
13211 mpfr_clears (m1, m2, m3, NULL);
13215 return result;
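/* A sketch of a typical use (the actual call site may differ in
   detail): fma (x, y, z) with constant operands can be folded with
       do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);  */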
13218 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13219 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13220 If ARG_SINP and ARG_COSP are NULL then the result is returned
13221 as a complex value.
13222 The type is taken from the type of ARG and is used for setting the
13223 precision of the calculation and results. */
13225 static tree
13226 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13228 tree const type = TREE_TYPE (arg);
13229 tree result = NULL_TREE;
13231 STRIP_NOPS (arg);
13233 /* To proceed, MPFR must exactly represent the target floating point
13234 format, which only happens when the target base equals two. */
13235 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13236 && TREE_CODE (arg) == REAL_CST
13237 && !TREE_OVERFLOW (arg))
13239 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13241 if (real_isfinite (ra))
13243 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13244 const int prec = fmt->p;
13245 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13246 tree result_s, result_c;
13247 int inexact;
13248 mpfr_t m, ms, mc;
13250 mpfr_inits2 (prec, m, ms, mc, NULL);
13251 mpfr_from_real (m, ra, GMP_RNDN);
13252 mpfr_clear_flags ();
13253 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13254 result_s = do_mpfr_ckconv (ms, type, inexact);
13255 result_c = do_mpfr_ckconv (mc, type, inexact);
13256 mpfr_clears (m, ms, mc, NULL);
13257 if (result_s && result_c)
13259 /* If we are to return the result as a complex value, do so. */
13260 if (!arg_sinp && !arg_cosp)
13261 return build_complex (build_complex_type (type),
13262 result_c, result_s);
13264 /* Dereference the sin/cos pointer arguments. */
13265 arg_sinp = build_fold_indirect_ref (arg_sinp);
13266 arg_cosp = build_fold_indirect_ref (arg_cosp);
13267 /* Proceed iff valid pointer types were passed in. */
13268 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13269 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13271 /* Set the values. */
13272 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13273 result_s);
13274 TREE_SIDE_EFFECTS (result_s) = 1;
13275 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13276 result_c);
13277 TREE_SIDE_EFFECTS (result_c) = 1;
13278 /* Combine the assignments into a compound expr. */
13279 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13280 result_s, result_c));
13285 return result;
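/* Two shapes of result are produced above.  For cexpi (x), with no
   pointer arguments, the fold yields a complex constant whose real
   part is cos (x) and whose imaginary part is sin (x); for
   sincos (x, s, c) it yields the compound expression
       (*s = sin (x), *c = cos (x))
   with both assignments marked as having side effects.  */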
13288 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13289 two-argument mpfr order N Bessel function FUNC on them and return
13290 the resulting value as a tree with type TYPE. The mpfr precision
13291 is set to the precision of TYPE. We assume that function FUNC
13292 returns zero if the result could be calculated exactly within the
13293 requested precision. */
13294 static tree
13295 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13296 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13297 const REAL_VALUE_TYPE *min, bool inclusive)
13299 tree result = NULL_TREE;
13301 STRIP_NOPS (arg1);
13302 STRIP_NOPS (arg2);
13304 /* To proceed, MPFR must exactly represent the target floating point
13305 format, which only happens when the target base equals two. */
13306 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13307 && host_integerp (arg1, 0)
13308 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13310 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13311 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13313 if (n == (long)n
13314 && real_isfinite (ra)
13315 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13317 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13318 const int prec = fmt->p;
13319 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13320 int inexact;
13321 mpfr_t m;
13323 mpfr_init2 (m, prec);
13324 mpfr_from_real (m, ra, GMP_RNDN);
13325 mpfr_clear_flags ();
13326 inexact = func (m, n, m, rnd);
13327 result = do_mpfr_ckconv (m, type, inexact);
13328 mpfr_clear (m);
13332 return result;
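/* A sketch of typical uses (call sites elsewhere may differ in
   detail): jn (n, x) can be folded for constant operands with
       do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
   while yn (n, x), which requires x > 0, would pass &dconst0 for MIN
   with INCLUSIVE false.  */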
13335 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13336 the pointer *(ARG_QUO) and return the result. The type is taken
13337 from the type of ARG0 and is used for setting the precision of the
13338 calculation and results. */
13340 static tree
13341 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13343 tree const type = TREE_TYPE (arg0);
13344 tree result = NULL_TREE;
13346 STRIP_NOPS (arg0);
13347 STRIP_NOPS (arg1);
13349 /* To proceed, MPFR must exactly represent the target floating point
13350 format, which only happens when the target base equals two. */
13351 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13352 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13353 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13355 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13356 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13358 if (real_isfinite (ra0) && real_isfinite (ra1))
13360 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13361 const int prec = fmt->p;
13362 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13363 tree result_rem;
13364 long integer_quo;
13365 mpfr_t m0, m1;
13367 mpfr_inits2 (prec, m0, m1, NULL);
13368 mpfr_from_real (m0, ra0, GMP_RNDN);
13369 mpfr_from_real (m1, ra1, GMP_RNDN);
13370 mpfr_clear_flags ();
13371 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13372 /* Remquo is independent of the rounding mode, so pass
13373 inexact=0 to do_mpfr_ckconv(). */
13374 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13375 mpfr_clears (m0, m1, NULL);
13376 if (result_rem)
13378 /* MPFR calculates quo in the host's long, so it may
13379 return more bits in quo than the target int can hold
13380 if sizeof (host long) > sizeof (target int). This can
13381 happen even for native compilers in LP64 mode. In
13382 these cases, reduce the quo value modulo the largest
13383 number that the target int can hold, leaving one bit
13384 for the sign. */
13385 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13386 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13388 /* Dereference the quo pointer argument. */
13389 arg_quo = build_fold_indirect_ref (arg_quo);
13390 /* Proceed iff a valid pointer type was passed in. */
13391 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13393 /* Set the value. */
13394 tree result_quo = fold_build2 (MODIFY_EXPR,
13395 TREE_TYPE (arg_quo), arg_quo,
13396 build_int_cst (NULL, integer_quo));
13397 TREE_SIDE_EFFECTS (result_quo) = 1;
13398 /* Combine the quo assignment with the rem. */
13399 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13400 result_quo, result_rem));
13405 return result;
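/* When the fold above succeeds, the result has the shape
       (*arg_quo = <quotient bits>, <remainder REAL_CST>)
   i.e. a COMPOUND_EXPR whose value is the remainder and whose first
   operand stores the (possibly reduced) quotient through ARG_QUO.  */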
13408 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13409 resulting value as a tree with type TYPE. The mpfr precision is
13410 set to the precision of TYPE. We assume that this mpfr function
13411 returns zero if the result could be calculated exactly within the
13412 requested precision. In addition, the integer pointer represented
13413 by ARG_SG will be dereferenced and set to the appropriate signgam
13414 (-1,1) value. */
13416 static tree
13417 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13419 tree result = NULL_TREE;
13421 STRIP_NOPS (arg);
13423 /* To proceed, MPFR must exactly represent the target floating point
13424 format, which only happens when the target base equals two. Also
13425 verify ARG is a constant and that ARG_SG is an int pointer. */
13426 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13427 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13428 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13429 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13431 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13433 /* In addition to NaN and Inf, the argument cannot be zero or a
13434 negative integer. */
13435 if (real_isfinite (ra)
13436 && ra->cl != rvc_zero
13437 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13439 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13440 const int prec = fmt->p;
13441 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13442 int inexact, sg;
13443 mpfr_t m;
13444 tree result_lg;
13446 mpfr_init2 (m, prec);
13447 mpfr_from_real (m, ra, GMP_RNDN);
13448 mpfr_clear_flags ();
13449 inexact = mpfr_lgamma (m, &sg, m, rnd);
13450 result_lg = do_mpfr_ckconv (m, type, inexact);
13451 mpfr_clear (m);
13452 if (result_lg)
13454 tree result_sg;
13456 /* Dereference the arg_sg pointer argument. */
13457 arg_sg = build_fold_indirect_ref (arg_sg);
13458 /* Assign the signgam value into *arg_sg. */
13459 result_sg = fold_build2 (MODIFY_EXPR,
13460 TREE_TYPE (arg_sg), arg_sg,
13461 build_int_cst (NULL, sg));
13462 TREE_SIDE_EFFECTS (result_sg) = 1;
13463 /* Combine the signgam assignment with the lgamma result. */
13464 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13465 result_sg, result_lg));
13470 return result;
13473 /* FIXME tuples.
13474 The functions below provide an alternate interface for folding
13475 builtin function calls presented as GIMPLE_CALL statements rather
13476 than as CALL_EXPRs. The folded result is still expressed as a
13477 tree. There is too much code duplication in the handling of
13478 varargs functions, and a more intrusive re-factoring would permit
13479 better sharing of code between the tree and statement-based
13480 versions of these functions. */
13482 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13483 along with N new arguments specified as the "..." parameters. SKIP
13484 is the number of arguments in STMT to be omitted. This function is used
13485 to do varargs-to-varargs transformations. */
13487 static tree
13488 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13490 int oldnargs = gimple_call_num_args (stmt);
13491 int nargs = oldnargs - skip + n;
13492 tree fntype = TREE_TYPE (fndecl);
13493 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13494 tree *buffer;
13495 int i, j;
13496 va_list ap;
13498 buffer = XALLOCAVEC (tree, nargs);
13499 va_start (ap, n);
13500 for (i = 0; i < n; i++)
13501 buffer[i] = va_arg (ap, tree);
13502 va_end (ap);
13503 for (j = skip; j < oldnargs; j++, i++)
13504 buffer[i] = gimple_call_arg (stmt, j);
13506 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
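/* For example, the __sprintf_chk folder below calls
       gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt)
   which drops the first four arguments of STMT (dest, flag, size,
   fmt), starts the new argument list with DEST and FMT, and appends
   any remaining "..." arguments, yielding a plain sprintf call.  */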
13509 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13510 a normal call should be emitted rather than expanding the function
13511 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13513 static tree
13514 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13516 tree dest, size, len, fn, fmt, flag;
13517 const char *fmt_str;
13518 int nargs = gimple_call_num_args (stmt);
13520 /* Verify the required arguments in the original call. */
13521 if (nargs < 4)
13522 return NULL_TREE;
13523 dest = gimple_call_arg (stmt, 0);
13524 if (!validate_arg (dest, POINTER_TYPE))
13525 return NULL_TREE;
13526 flag = gimple_call_arg (stmt, 1);
13527 if (!validate_arg (flag, INTEGER_TYPE))
13528 return NULL_TREE;
13529 size = gimple_call_arg (stmt, 2);
13530 if (!validate_arg (size, INTEGER_TYPE))
13531 return NULL_TREE;
13532 fmt = gimple_call_arg (stmt, 3);
13533 if (!validate_arg (fmt, POINTER_TYPE))
13534 return NULL_TREE;
13536 if (! host_integerp (size, 1))
13537 return NULL_TREE;
13539 len = NULL_TREE;
13541 if (!init_target_chars ())
13542 return NULL_TREE;
13544 /* Check whether the format is a literal string constant. */
13545 fmt_str = c_getstr (fmt);
13546 if (fmt_str != NULL)
13548 /* If the format doesn't contain % args or %%, we know the size. */
13549 if (strchr (fmt_str, target_percent) == 0)
13551 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13552 len = build_int_cstu (size_type_node, strlen (fmt_str));
13554 /* If the format is "%s" and the first ... argument is a string
13555 literal, we know the size too. */
13556 else if (fcode == BUILT_IN_SPRINTF_CHK
13557 && strcmp (fmt_str, target_percent_s) == 0)
13559 tree arg;
13561 if (nargs == 5)
13563 arg = gimple_call_arg (stmt, 4);
13564 if (validate_arg (arg, POINTER_TYPE))
13566 len = c_strlen (arg, 1);
13567 if (! len || ! host_integerp (len, 1))
13568 len = NULL_TREE;
13574 if (! integer_all_onesp (size))
13576 if (! len || ! tree_int_cst_lt (len, size))
13577 return NULL_TREE;
13580 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13581 or if format doesn't contain % chars or is "%s". */
13582 if (! integer_zerop (flag))
13584 if (fmt_str == NULL)
13585 return NULL_TREE;
13586 if (strchr (fmt_str, target_percent) != NULL
13587 && strcmp (fmt_str, target_percent_s))
13588 return NULL_TREE;
13591 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13592 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13593 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13594 if (!fn)
13595 return NULL_TREE;
13597 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
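/* Illustrative example (a sketch; all of the checks above must
   pass): with flag 0,
       __sprintf_chk (d, 0, sz, "hello")
   is rewritten to sprintf (d, "hello") when sz is either unknown
   ((size_t) -1) or known to exceed the length of the constant
   format string.  */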
13600 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
13601 a normal call should be emitted rather than expanding the function
13602 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13603 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum
13604 length passed as the second argument. */
13606 tree
13607 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13608 enum built_in_function fcode)
13610 tree dest, size, len, fn, fmt, flag;
13611 const char *fmt_str;
13613 /* Verify the required arguments in the original call. */
13614 if (gimple_call_num_args (stmt) < 5)
13615 return NULL_TREE;
13616 dest = gimple_call_arg (stmt, 0);
13617 if (!validate_arg (dest, POINTER_TYPE))
13618 return NULL_TREE;
13619 len = gimple_call_arg (stmt, 1);
13620 if (!validate_arg (len, INTEGER_TYPE))
13621 return NULL_TREE;
13622 flag = gimple_call_arg (stmt, 2);
13623 if (!validate_arg (flag, INTEGER_TYPE))
13624 return NULL_TREE;
13625 size = gimple_call_arg (stmt, 3);
13626 if (!validate_arg (size, INTEGER_TYPE))
13627 return NULL_TREE;
13628 fmt = gimple_call_arg (stmt, 4);
13629 if (!validate_arg (fmt, POINTER_TYPE))
13630 return NULL_TREE;
13632 if (! host_integerp (size, 1))
13633 return NULL_TREE;
13635 if (! integer_all_onesp (size))
13637 if (! host_integerp (len, 1))
13639 /* If LEN is not constant, try MAXLEN too. For MAXLEN only
13640 allow optimizing into the non-checking function if SIZE is
13641 >= MAXLEN; never convert the call into one that always fails. */
13642 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13643 return NULL_TREE;
13645 else
13646 maxlen = len;
13648 if (tree_int_cst_lt (size, maxlen))
13649 return NULL_TREE;
13652 if (!init_target_chars ())
13653 return NULL_TREE;
13655 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13656 or if format doesn't contain % chars or is "%s". */
13657 if (! integer_zerop (flag))
13659 fmt_str = c_getstr (fmt);
13660 if (fmt_str == NULL)
13661 return NULL_TREE;
13662 if (strchr (fmt_str, target_percent) != NULL
13663 && strcmp (fmt_str, target_percent_s))
13664 return NULL_TREE;
13667 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13668 available. */
13669 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13670 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13671 if (!fn)
13672 return NULL_TREE;
13674 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
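/* Illustrative example (a sketch; all of the checks above must
   pass): with flag 0,
       __snprintf_chk (d, n, 0, sz, "%s", s)
   is rewritten to snprintf (d, n, "%s", s) when sz is unknown
   ((size_t) -1) or known to be at least n.  */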
13677 /* Builtins with folding operations that operate on "..." arguments
13678 need special handling; we need to store the arguments in a convenient
13679 data structure before attempting any folding. Fortunately there are
13680 only a few builtins that fall into this category. FNDECL is the
13681 function, STMT is the GIMPLE_CALL for the call, and IGNORE is true if the
13682 result of the function call is ignored. */
13684 static tree
13685 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13687 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13688 tree ret = NULL_TREE;
13690 switch (fcode)
13692 case BUILT_IN_SPRINTF_CHK:
13693 case BUILT_IN_VSPRINTF_CHK:
13694 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13695 break;
13697 case BUILT_IN_SNPRINTF_CHK:
13698 case BUILT_IN_VSNPRINTF_CHK:
13699 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13701 default:
13702 break;
13704 if (ret)
13706 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13707 TREE_NO_WARNING (ret) = 1;
13708 return ret;
13710 return NULL_TREE;
13713 /* A wrapper function for builtin folding that prevents warnings for
13714 "statement without effect" and the like, caused by removing the
13715 call node earlier than the warning is generated. */
13717 tree
13718 fold_call_stmt (gimple stmt, bool ignore)
13720 tree ret = NULL_TREE;
13721 tree fndecl = gimple_call_fndecl (stmt);
13722 if (fndecl
13723 && TREE_CODE (fndecl) == FUNCTION_DECL
13724 && DECL_BUILT_IN (fndecl)
13725 && !gimple_call_va_arg_pack_p (stmt))
13727 int nargs = gimple_call_num_args (stmt);
13729 if (avoid_folding_inline_builtin (fndecl))
13730 return NULL_TREE;
13731 /* FIXME: Don't use a list in this interface. */
13732 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13734 tree arglist = NULL_TREE;
13735 int i;
13736 for (i = nargs - 1; i >= 0; i--)
13737 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13738 return targetm.fold_builtin (fndecl, arglist, ignore);
13740 else
13742 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13744 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13745 int i;
13746 for (i = 0; i < nargs; i++)
13747 args[i] = gimple_call_arg (stmt, i);
13748 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13750 if (!ret)
13751 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13752 if (ret)
13754 /* Propagate location information from original call to
13755 expansion of builtin. Otherwise things like
13756 maybe_emit_chk_warning, that operate on the expansion
13757 of a builtin, will use the wrong location information. */
13758 if (gimple_has_location (stmt))
13760 tree realret = ret;
13761 if (TREE_CODE (ret) == NOP_EXPR)
13762 realret = TREE_OPERAND (ret, 0);
13763 if (CAN_HAVE_LOCATION_P (realret)
13764 && !EXPR_HAS_LOCATION (realret))
13765 SET_EXPR_LOCATION (realret, gimple_location (stmt));
13766 return realret;
13768 return ret;
13772 return NULL_TREE;