[official-gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
62 /* Define the names of the builtin function types and codes. */
63 const char *const built_in_class_names[4]
64 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
67 const char * built_in_names[(int) END_BUILTINS] =
69 #include "builtins.def"
71 #undef DEF_BUILTIN
73 /* Set up an array of _DECL trees; make sure each element is
74 initialized to NULL_TREE. */
75 tree built_in_decls[(int) END_BUILTINS];
76 /* Declarations used when constructing the builtin implicitly in the compiler.
77 It may be NULL_TREE when this is invalid (for instance when the runtime is
78 not required to implement the function call in all cases). */
79 tree implicit_built_in_decls[(int) END_BUILTINS];
81 static const char *c_getstr (tree);
82 static rtx c_readstr (const char *, enum machine_mode);
83 static int target_char_cast (tree, char *);
84 static rtx get_memory_rtx (tree, tree);
85 static int apply_args_size (void);
86 static int apply_result_size (void);
87 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
88 static rtx result_vector (int, rtx);
89 #endif
90 static void expand_builtin_update_setjmp_buf (rtx);
91 static void expand_builtin_prefetch (tree);
92 static rtx expand_builtin_apply_args (void);
93 static rtx expand_builtin_apply_args_1 (void);
94 static rtx expand_builtin_apply (rtx, rtx, rtx);
95 static void expand_builtin_return (rtx);
96 static enum type_class type_to_class (tree);
97 static rtx expand_builtin_classify_type (tree);
98 static void expand_errno_check (tree, rtx);
99 static rtx expand_builtin_mathfn (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_args_info (tree);
108 static rtx expand_builtin_next_arg (void);
109 static rtx expand_builtin_va_start (tree);
110 static rtx expand_builtin_va_end (tree);
111 static rtx expand_builtin_va_copy (tree);
112 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
124 enum machine_mode, int);
125 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
126 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_bcopy (tree, int);
129 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
130 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
131 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
133 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
134 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
136 static rtx expand_builtin_bzero (tree);
137 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
142 static rtx expand_builtin_alloca (tree, rtx);
143 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
144 static rtx expand_builtin_frame_address (tree, tree);
145 static rtx expand_builtin_fputs (tree, rtx, bool);
146 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
147 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
148 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
149 static tree stabilize_va_list (tree, int);
150 static rtx expand_builtin_expect (tree, rtx);
151 static tree fold_builtin_constant_p (tree);
152 static tree fold_builtin_expect (tree, tree);
153 static tree fold_builtin_classify_type (tree);
154 static tree fold_builtin_strlen (tree);
155 static tree fold_builtin_inf (tree, int);
156 static tree fold_builtin_nan (tree, tree, int);
157 static tree rewrite_call_expr (tree, int, tree, int, ...);
158 static bool validate_arg (const_tree, enum tree_code code);
159 static bool integer_valued_real_p (tree);
160 static tree fold_trunc_transparent_mathfn (tree, tree);
161 static bool readonly_data_expr (tree);
162 static rtx expand_builtin_fabs (tree, rtx, rtx);
163 static rtx expand_builtin_signbit (tree, rtx);
164 static tree fold_builtin_sqrt (tree, tree);
165 static tree fold_builtin_cbrt (tree, tree);
166 static tree fold_builtin_pow (tree, tree, tree, tree);
167 static tree fold_builtin_powi (tree, tree, tree, tree);
168 static tree fold_builtin_cos (tree, tree, tree);
169 static tree fold_builtin_cosh (tree, tree, tree);
170 static tree fold_builtin_tan (tree, tree);
171 static tree fold_builtin_trunc (tree, tree);
172 static tree fold_builtin_floor (tree, tree);
173 static tree fold_builtin_ceil (tree, tree);
174 static tree fold_builtin_round (tree, tree);
175 static tree fold_builtin_int_roundingfn (tree, tree);
176 static tree fold_builtin_bitop (tree, tree);
177 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
178 static tree fold_builtin_strchr (tree, tree, tree);
179 static tree fold_builtin_memchr (tree, tree, tree, tree);
180 static tree fold_builtin_memcmp (tree, tree, tree);
181 static tree fold_builtin_strcmp (tree, tree);
182 static tree fold_builtin_strncmp (tree, tree, tree);
183 static tree fold_builtin_signbit (tree, tree);
184 static tree fold_builtin_copysign (tree, tree, tree, tree);
185 static tree fold_builtin_isascii (tree);
186 static tree fold_builtin_toascii (tree);
187 static tree fold_builtin_isdigit (tree);
188 static tree fold_builtin_fabs (tree, tree);
189 static tree fold_builtin_abs (tree, tree);
190 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
191 enum tree_code);
192 static tree fold_builtin_n (tree, tree *, int, bool);
193 static tree fold_builtin_0 (tree, bool);
194 static tree fold_builtin_1 (tree, tree, bool);
195 static tree fold_builtin_2 (tree, tree, tree, bool);
196 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
197 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
198 static tree fold_builtin_varargs (tree, tree, bool);
200 static tree fold_builtin_strpbrk (tree, tree, tree);
201 static tree fold_builtin_strstr (tree, tree, tree);
202 static tree fold_builtin_strrchr (tree, tree, tree);
203 static tree fold_builtin_strcat (tree, tree);
204 static tree fold_builtin_strncat (tree, tree, tree);
205 static tree fold_builtin_strspn (tree, tree);
206 static tree fold_builtin_strcspn (tree, tree);
207 static tree fold_builtin_sprintf (tree, tree, tree, int);
209 static rtx expand_builtin_object_size (tree);
210 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
211 enum built_in_function);
212 static void maybe_emit_chk_warning (tree, enum built_in_function);
213 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
214 static void maybe_emit_free_warning (tree);
215 static tree fold_builtin_object_size (tree, tree);
216 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
217 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
218 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
219 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
220 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
221 enum built_in_function);
222 static bool init_target_chars (void);
224 static unsigned HOST_WIDE_INT target_newline;
225 static unsigned HOST_WIDE_INT target_percent;
226 static unsigned HOST_WIDE_INT target_c;
227 static unsigned HOST_WIDE_INT target_s;
228 static char target_percent_c[3];
229 static char target_percent_s[3];
230 static char target_percent_s_newline[4];
231 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
232 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
233 static tree do_mpfr_arg2 (tree, tree, tree,
234 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
235 static tree do_mpfr_arg3 (tree, tree, tree, tree,
236 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
237 static tree do_mpfr_sincos (tree, tree, tree);
238 static tree do_mpfr_bessel_n (tree, tree, tree,
239 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
240 const REAL_VALUE_TYPE *, bool);
241 static tree do_mpfr_remquo (tree, tree, tree);
242 static tree do_mpfr_lgamma_r (tree, tree, tree);
244 bool
245 is_builtin_name (const char *name)
247 if (strncmp (name, "__builtin_", 10) == 0)
248 return true;
249 if (strncmp (name, "__sync_", 7) == 0)
250 return true;
251 return false;
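/* For illustration: the prefix checks above mean that names such as
   "__builtin_memcpy" or "__sync_fetch_and_add" are recognized, while a
   plain "memcpy" is not.  */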
254 /* Return true if NODE should be considered for inline expansion regardless
255 of the optimization level. This means whenever a function is invoked with
256 its "internal" name, which normally contains the prefix "__builtin". */
258 static bool
259 called_as_built_in (tree node)
261 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
262 we want the name used to call the function, not the name it
263 will have. */
264 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
265 return is_builtin_name (name);
268 /* Return the alignment in bits of EXP, an object.
269 Don't return more than MAX_ALIGN no matter what; ALIGN is the initial
270 guessed alignment, e.g. from type alignment. */
273 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
275 unsigned int inner;
277 inner = max_align;
278 if (handled_component_p (exp))
280 HOST_WIDE_INT bitsize, bitpos;
281 tree offset;
282 enum machine_mode mode;
283 int unsignedp, volatilep;
285 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
286 &mode, &unsignedp, &volatilep, true);
287 if (bitpos)
288 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
289 while (offset)
291 tree next_offset;
293 if (TREE_CODE (offset) == PLUS_EXPR)
295 next_offset = TREE_OPERAND (offset, 0);
296 offset = TREE_OPERAND (offset, 1);
298 else
299 next_offset = NULL;
300 if (host_integerp (offset, 1))
302 /* Any overflow in calculating offset_bits won't change
303 the alignment. */
304 unsigned offset_bits
305 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
307 if (offset_bits)
308 inner = MIN (inner, (offset_bits & -offset_bits));
310 else if (TREE_CODE (offset) == MULT_EXPR
311 && host_integerp (TREE_OPERAND (offset, 1), 1))
313 /* Any overflow in calculating offset_factor won't change
314 the alignment. */
315 unsigned offset_factor
316 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
317 * BITS_PER_UNIT);
319 if (offset_factor)
320 inner = MIN (inner, (offset_factor & -offset_factor));
322 else
324 inner = MIN (inner, BITS_PER_UNIT);
325 break;
327 offset = next_offset;
330 if (DECL_P (exp))
331 align = MIN (inner, DECL_ALIGN (exp));
332 #ifdef CONSTANT_ALIGNMENT
333 else if (CONSTANT_CLASS_P (exp))
334 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
335 #endif
336 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
337 || TREE_CODE (exp) == INDIRECT_REF)
338 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
339 else
340 align = MIN (align, inner);
341 return MIN (align, max_align);
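/* For example, a COMPONENT_REF whose field starts 6 bytes (48 bits) into
   its containing object contributes bitpos & -bitpos == 16, so no more
   than 16-bit alignment can be guaranteed here, however strictly aligned
   the containing object itself is.  */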
344 /* Return the alignment in bits of EXP, a pointer valued expression.
345 But don't return more than MAX_ALIGN no matter what.
346 The alignment returned is, by default, the alignment of the thing that
347 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
349 Otherwise, look at the expression to see if we can do better, i.e., if the
350 expression is actually pointing at an object whose alignment is tighter. */
353 get_pointer_alignment (tree exp, unsigned int max_align)
355 unsigned int align, inner;
357 /* We rely on TER to compute accurate alignment information. */
358 if (!(optimize && flag_tree_ter))
359 return 0;
361 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
362 return 0;
364 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
365 align = MIN (align, max_align);
367 while (1)
369 switch (TREE_CODE (exp))
371 CASE_CONVERT:
372 exp = TREE_OPERAND (exp, 0);
373 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
374 return align;
376 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
377 align = MIN (inner, max_align);
378 break;
380 case POINTER_PLUS_EXPR:
381 /* If sum of pointer + int, restrict our maximum alignment to that
382 imposed by the integer. If not, we can't do any better than
383 ALIGN. */
384 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
385 return align;
387 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
388 & (max_align / BITS_PER_UNIT - 1))
389 != 0)
390 max_align >>= 1;
392 exp = TREE_OPERAND (exp, 0);
393 break;
395 case ADDR_EXPR:
396 /* See what we are pointing at and look at its alignment. */
397 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
399 default:
400 return align;
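/* For example, for a POINTER_PLUS_EXPR that adds the constant 1 to some
   pointer P, the POINTER_PLUS_EXPR case above shifts MAX_ALIGN down to
   BITS_PER_UNIT, so an expression like P + 1 is only known to be byte
   aligned.  */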
405 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
406 way, because the string could contain a zero byte in the middle.
407 TREE_STRING_LENGTH is the size of the character array, not the string.
409 ONLY_VALUE should be nonzero if the result is not going to be emitted
410 into the instruction stream and zero if it is going to be expanded.
411 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
412 is returned, otherwise NULL, since
413 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
414 evaluate the side-effects.
416 The value returned is of type `ssizetype'.
418 Unfortunately, string_constant can't access the values of const char
419 arrays with initializers, so neither can we do so here. */
421 tree
422 c_strlen (tree src, int only_value)
424 tree offset_node;
425 HOST_WIDE_INT offset;
426 int max;
427 const char *ptr;
429 STRIP_NOPS (src);
430 if (TREE_CODE (src) == COND_EXPR
431 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
433 tree len1, len2;
435 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
436 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
437 if (tree_int_cst_equal (len1, len2))
438 return len1;
441 if (TREE_CODE (src) == COMPOUND_EXPR
442 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
443 return c_strlen (TREE_OPERAND (src, 1), only_value);
445 src = string_constant (src, &offset_node);
446 if (src == 0)
447 return NULL_TREE;
449 max = TREE_STRING_LENGTH (src) - 1;
450 ptr = TREE_STRING_POINTER (src);
452 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
454 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
455 compute the offset to the following null if we don't know where to
456 start searching for it. */
457 int i;
459 for (i = 0; i < max; i++)
460 if (ptr[i] == 0)
461 return NULL_TREE;
463 /* We don't know the starting offset, but we do know that the string
464 has no internal zero bytes. We can assume that the offset falls
465 within the bounds of the string; otherwise, the programmer deserves
466 what he gets. Subtract the offset from the length of the string,
467 and return that. This would perhaps not be valid if we were dealing
468 with named arrays in addition to literal string constants. */
470 return size_diffop (size_int (max), offset_node);
473 /* We have a known offset into the string. Start searching there for
474 a null character if we can represent it as a single HOST_WIDE_INT. */
475 if (offset_node == 0)
476 offset = 0;
477 else if (! host_integerp (offset_node, 0))
478 offset = -1;
479 else
480 offset = tree_low_cst (offset_node, 0);
482 /* If the offset is known to be out of bounds, the front-end should
483 have warned already. We call strlen at runtime.
485 ??? Perhaps we should turn this into an assert and force
486 front-ends to define offsets within boundaries. */
487 if (offset < 0 || offset > max)
489 return NULL_TREE;
492 /* Use strlen to search for the first zero byte. Since any strings
493 constructed with build_string will have nulls appended, we win even
494 if we get handed something like (char[4])"abcd".
496 Since OFFSET is our starting index into the string, no further
497 calculation is needed. */
498 return ssize_int (strlen (ptr + offset));
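/* For example, c_strlen on the literal "hello" yields ssize_int (5).
   For a string such as "foo\0bar" accessed through a non-constant offset,
   the embedded zero byte makes the function give up and return
   NULL_TREE.  */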
501 /* Return a char pointer for a C string if it is a string constant
502 or sum of string constant and integer constant. */
504 static const char *
505 c_getstr (tree src)
507 tree offset_node;
509 src = string_constant (src, &offset_node);
510 if (src == 0)
511 return 0;
513 if (offset_node == 0)
514 return TREE_STRING_POINTER (src);
515 else if (!host_integerp (offset_node, 1)
516 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
517 return 0;
519 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
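/* For example, for the string constant "hello" plus a constant offset of 2,
   c_getstr returns a pointer to "llo"; a non-constant or out-of-range
   offset yields 0.  */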
522 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
523 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
525 static rtx
526 c_readstr (const char *str, enum machine_mode mode)
528 HOST_WIDE_INT c[2];
529 HOST_WIDE_INT ch;
530 unsigned int i, j;
532 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
534 c[0] = 0;
535 c[1] = 0;
536 ch = 1;
537 for (i = 0; i < GET_MODE_SIZE (mode); i++)
539 j = i;
540 if (WORDS_BIG_ENDIAN)
541 j = GET_MODE_SIZE (mode) - i - 1;
542 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
543 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
544 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
545 j *= BITS_PER_UNIT;
546 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
548 if (ch)
549 ch = (unsigned char) str[i];
550 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
552 return immed_double_const (c[0], c[1], mode);
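/* For example, on a little-endian target with 8-bit units,
   c_readstr ("abcd", SImode) packs the bytes as 0x64636261,
   with 'a' in the least significant byte.  */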
555 /* Cast a target constant CST to target CHAR; if that value fits into the
556 host char type, return zero and put that value into the variable pointed to
557 by P. */
559 static int
560 target_char_cast (tree cst, char *p)
562 unsigned HOST_WIDE_INT val, hostval;
564 if (!host_integerp (cst, 1)
565 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
566 return 1;
568 val = tree_low_cst (cst, 1);
569 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
570 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
572 hostval = val;
573 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
574 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
576 if (val != hostval)
577 return 1;
579 *p = hostval;
580 return 0;
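/* For example, target_char_cast on build_int_cst (char_type_node, 'A')
   stores 'A' through P and returns 0, while a constant that does not
   survive the narrowing to the target/host char size makes it return 1.  */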
583 /* Similar to save_expr, but assumes that arbitrary code is not executed
584 in between the multiple evaluations. In particular, we assume that a
585 non-addressable local variable will not be modified. */
587 static tree
588 builtin_save_expr (tree exp)
590 if (TREE_ADDRESSABLE (exp) == 0
591 && (TREE_CODE (exp) == PARM_DECL
592 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
593 return exp;
595 return save_expr (exp);
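/* For example, a PARM_DECL, or a local variable that is neither static nor
   addressable, is returned unchanged; anything else (say *P for some
   pointer P) is wrapped by save_expr so it is evaluated only once.  */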
598 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
599 times to get the address of either a higher stack frame, or a return
600 address located within it (depending on FNDECL_CODE). */
602 static rtx
603 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
605 int i;
607 #ifdef INITIAL_FRAME_ADDRESS_RTX
608 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
609 #else
610 rtx tem;
612 /* For a zero count with __builtin_return_address, we don't care what
613 frame address we return, because target-specific definitions will
614 override us. Therefore frame pointer elimination is OK, and using
615 the soft frame pointer is OK.
617 For a nonzero count, or a zero count with __builtin_frame_address,
618 we require a stable offset from the current frame pointer to the
619 previous one, so we must use the hard frame pointer, and
620 we must disable frame pointer elimination. */
621 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
622 tem = frame_pointer_rtx;
623 else
625 tem = hard_frame_pointer_rtx;
627 /* Tell reload not to eliminate the frame pointer. */
628 crtl->accesses_prior_frames = 1;
630 #endif
632 /* Some machines need special handling before we can access
633 arbitrary frames. For example, on the SPARC, we must first flush
634 all register windows to the stack. */
635 #ifdef SETUP_FRAME_ADDRESSES
636 if (count > 0)
637 SETUP_FRAME_ADDRESSES ();
638 #endif
640 /* On the SPARC, the return address is not in the frame, it is in a
641 register. There is no way to access it off of the current frame
642 pointer, but it can be accessed off the previous frame pointer by
643 reading the value from the register window save area. */
644 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
645 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
646 count--;
647 #endif
649 /* Scan back COUNT frames to the specified frame. */
650 for (i = 0; i < count; i++)
652 /* Assume the dynamic chain pointer is in the word that the
653 frame address points to, unless otherwise specified. */
654 #ifdef DYNAMIC_CHAIN_ADDRESS
655 tem = DYNAMIC_CHAIN_ADDRESS (tem);
656 #endif
657 tem = memory_address (Pmode, tem);
658 tem = gen_frame_mem (Pmode, tem);
659 tem = copy_to_reg (tem);
662 /* For __builtin_frame_address, return what we've got. But, on
663 the SPARC for example, we may have to add a bias. */
664 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
665 #ifdef FRAME_ADDR_RTX
666 return FRAME_ADDR_RTX (tem);
667 #else
668 return tem;
669 #endif
671 /* For __builtin_return_address, get the return address from that frame. */
672 #ifdef RETURN_ADDR_RTX
673 tem = RETURN_ADDR_RTX (count, tem);
674 #else
675 tem = memory_address (Pmode,
676 plus_constant (tem, GET_MODE_SIZE (Pmode)));
677 tem = gen_frame_mem (Pmode, tem);
678 #endif
679 return tem;
682 /* Alias set used for setjmp buffer. */
683 static alias_set_type setjmp_alias_set = -1;
685 /* Construct the leading half of a __builtin_setjmp call. Control will
686 return to RECEIVER_LABEL. This is also called directly by the SJLJ
687 exception handling code. */
689 void
690 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
692 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
693 rtx stack_save;
694 rtx mem;
696 if (setjmp_alias_set == -1)
697 setjmp_alias_set = new_alias_set ();
699 buf_addr = convert_memory_address (Pmode, buf_addr);
701 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
703 /* We store the frame pointer and the address of receiver_label in
704 the buffer and use the rest of it for the stack save area, which
705 is machine-dependent. */
707 mem = gen_rtx_MEM (Pmode, buf_addr);
708 set_mem_alias_set (mem, setjmp_alias_set);
709 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
711 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
712 set_mem_alias_set (mem, setjmp_alias_set);
714 emit_move_insn (validize_mem (mem),
715 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
717 stack_save = gen_rtx_MEM (sa_mode,
718 plus_constant (buf_addr,
719 2 * GET_MODE_SIZE (Pmode)));
720 set_mem_alias_set (stack_save, setjmp_alias_set);
721 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
723 /* If there is further processing to do, do it. */
724 #ifdef HAVE_builtin_setjmp_setup
725 if (HAVE_builtin_setjmp_setup)
726 emit_insn (gen_builtin_setjmp_setup (buf_addr));
727 #endif
729 /* Tell optimize_save_area_alloca that extra work will need to
730 be done during alloca. */
731 cfun->calls_setjmp = 1;
733 /* We have a nonlocal label. */
734 cfun->has_nonlocal_label = 1;
737 /* Construct the trailing part of a __builtin_setjmp call. This is
738 also called directly by the SJLJ exception handling code. */
740 void
741 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
743 /* Clobber the FP when we get here, so we have to make sure it's
744 marked as used by this function. */
745 emit_use (hard_frame_pointer_rtx);
747 /* Mark the static chain as clobbered here so life information
748 doesn't get messed up for it. */
749 emit_clobber (static_chain_rtx);
751 /* Now put in the code to restore the frame pointer, and argument
752 pointer, if needed. */
753 #ifdef HAVE_nonlocal_goto
754 if (! HAVE_nonlocal_goto)
755 #endif
757 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
758 /* This might change the hard frame pointer in ways that aren't
759 apparent to early optimization passes, so force a clobber. */
760 emit_clobber (hard_frame_pointer_rtx);
763 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
764 if (fixed_regs[ARG_POINTER_REGNUM])
766 #ifdef ELIMINABLE_REGS
767 size_t i;
768 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
770 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
771 if (elim_regs[i].from == ARG_POINTER_REGNUM
772 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
773 break;
775 if (i == ARRAY_SIZE (elim_regs))
776 #endif
778 /* Now restore our arg pointer from the address at which it
779 was saved in our stack frame. */
780 emit_move_insn (crtl->args.internal_arg_pointer,
781 copy_to_reg (get_arg_pointer_save_area ()));
784 #endif
786 #ifdef HAVE_builtin_setjmp_receiver
787 if (HAVE_builtin_setjmp_receiver)
788 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
789 else
790 #endif
791 #ifdef HAVE_nonlocal_goto_receiver
792 if (HAVE_nonlocal_goto_receiver)
793 emit_insn (gen_nonlocal_goto_receiver ());
794 else
795 #endif
796 { /* Nothing */ }
798 /* We must not allow the code we just generated to be reordered by
799 scheduling. Specifically, the update of the frame pointer must
800 happen immediately, not later. */
801 emit_insn (gen_blockage ());
804 /* __builtin_longjmp is passed a pointer to an array of five words (not
805 all will be used on all machines). It operates similarly to the C
806 library function of the same name, but is more efficient. Much of
807 the code below is copied from the handling of non-local gotos. */
809 static void
810 expand_builtin_longjmp (rtx buf_addr, rtx value)
812 rtx fp, lab, stack, insn, last;
813 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
815 /* DRAP is needed for stack realignment if longjmp is expanded in the
816 current function. */
817 if (SUPPORTS_STACK_ALIGNMENT)
818 crtl->need_drap = true;
820 if (setjmp_alias_set == -1)
821 setjmp_alias_set = new_alias_set ();
823 buf_addr = convert_memory_address (Pmode, buf_addr);
825 buf_addr = force_reg (Pmode, buf_addr);
827 /* We used to store value in static_chain_rtx, but that fails if pointers
828 are smaller than integers. We instead require that the user must pass
829 a second argument of 1, because that is what builtin_setjmp will
830 return. This also makes EH slightly more efficient, since we are no
831 longer copying around a value that we don't care about. */
832 gcc_assert (value == const1_rtx);
834 last = get_last_insn ();
835 #ifdef HAVE_builtin_longjmp
836 if (HAVE_builtin_longjmp)
837 emit_insn (gen_builtin_longjmp (buf_addr));
838 else
839 #endif
841 fp = gen_rtx_MEM (Pmode, buf_addr);
842 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
843 GET_MODE_SIZE (Pmode)));
845 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
846 2 * GET_MODE_SIZE (Pmode)));
847 set_mem_alias_set (fp, setjmp_alias_set);
848 set_mem_alias_set (lab, setjmp_alias_set);
849 set_mem_alias_set (stack, setjmp_alias_set);
851 /* Pick up FP, label, and SP from the block and jump. This code is
852 from expand_goto in stmt.c; see there for detailed comments. */
853 #ifdef HAVE_nonlocal_goto
854 if (HAVE_nonlocal_goto)
855 /* We have to pass a value to the nonlocal_goto pattern that will
856 get copied into the static_chain pointer, but it does not matter
857 what that value is, because builtin_setjmp does not use it. */
858 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
859 else
860 #endif
862 lab = copy_to_reg (lab);
864 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
865 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
867 emit_move_insn (hard_frame_pointer_rtx, fp);
868 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
870 emit_use (hard_frame_pointer_rtx);
871 emit_use (stack_pointer_rtx);
872 emit_indirect_jump (lab);
876 /* Search backwards and mark the jump insn as a non-local goto.
877 Note that this precludes the use of __builtin_longjmp to a
878 __builtin_setjmp target in the same function. However, we've
879 already cautioned the user that these functions are for
880 internal exception handling use only. */
881 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
883 gcc_assert (insn != last);
885 if (JUMP_P (insn))
887 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
888 break;
890 else if (CALL_P (insn))
891 break;
895 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
896 and the address of the save area. */
898 static rtx
899 expand_builtin_nonlocal_goto (tree exp)
901 tree t_label, t_save_area;
902 rtx r_label, r_save_area, r_fp, r_sp, insn;
904 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
905 return NULL_RTX;
907 t_label = CALL_EXPR_ARG (exp, 0);
908 t_save_area = CALL_EXPR_ARG (exp, 1);
910 r_label = expand_normal (t_label);
911 r_label = convert_memory_address (Pmode, r_label);
912 r_save_area = expand_normal (t_save_area);
913 r_save_area = convert_memory_address (Pmode, r_save_area);
914 /* Copy the address of the save location to a register just in case it was based
915 on the frame pointer. */
916 r_save_area = copy_to_reg (r_save_area);
917 r_fp = gen_rtx_MEM (Pmode, r_save_area);
918 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
919 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
921 crtl->has_nonlocal_goto = 1;
923 #ifdef HAVE_nonlocal_goto
924 /* ??? We no longer need to pass the static chain value, afaik. */
925 if (HAVE_nonlocal_goto)
926 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
927 else
928 #endif
930 r_label = copy_to_reg (r_label);
932 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
933 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
935 /* Restore frame pointer for containing function.
936 This sets the actual hard register used for the frame pointer
937 to the location of the function's incoming static chain info.
938 The non-local goto handler will then adjust it to contain the
939 proper value and reload the argument pointer, if needed. */
940 emit_move_insn (hard_frame_pointer_rtx, r_fp);
941 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
943 /* USE of hard_frame_pointer_rtx added for consistency;
944 not clear if really needed. */
945 emit_use (hard_frame_pointer_rtx);
946 emit_use (stack_pointer_rtx);
948 /* If the architecture is using a GP register, we must
949 conservatively assume that the target function makes use of it.
950 The prologue of functions with nonlocal gotos must therefore
951 initialize the GP register to the appropriate value, and we
952 must then make sure that this value is live at the point
953 of the jump. (Note that this doesn't necessarily apply
954 to targets with a nonlocal_goto pattern; they are free
955 to implement it in their own way. Note also that this is
956 a no-op if the GP register is a global invariant.) */
957 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
958 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
959 emit_use (pic_offset_table_rtx);
961 emit_indirect_jump (r_label);
964 /* Search backwards to the jump insn and mark it as a
965 non-local goto. */
966 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
968 if (JUMP_P (insn))
970 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
971 break;
973 else if (CALL_P (insn))
974 break;
977 return const0_rtx;
980 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
981 (not all will be used on all machines) that was passed to __builtin_setjmp.
982 It updates the stack pointer in that block to correspond to the current
983 stack pointer. */
985 static void
986 expand_builtin_update_setjmp_buf (rtx buf_addr)
988 enum machine_mode sa_mode = Pmode;
989 rtx stack_save;
992 #ifdef HAVE_save_stack_nonlocal
993 if (HAVE_save_stack_nonlocal)
994 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
995 #endif
996 #ifdef STACK_SAVEAREA_MODE
997 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
998 #endif
1000 stack_save
1001 = gen_rtx_MEM (sa_mode,
1002 memory_address
1003 (sa_mode,
1004 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1006 #ifdef HAVE_setjmp
1007 if (HAVE_setjmp)
1008 emit_insn (gen_setjmp ());
1009 #endif
1011 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1014 /* Expand a call to __builtin_prefetch. For a target that does not support
1015 data prefetch, evaluate the memory address argument in case it has side
1016 effects. */
1018 static void
1019 expand_builtin_prefetch (tree exp)
1021 tree arg0, arg1, arg2;
1022 int nargs;
1023 rtx op0, op1, op2;
1025 if (!validate_arglist (exp, POINTER_TYPE, 0))
1026 return;
1028 arg0 = CALL_EXPR_ARG (exp, 0);
1030 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1031 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1032 locality). */
1033 nargs = call_expr_nargs (exp);
1034 if (nargs > 1)
1035 arg1 = CALL_EXPR_ARG (exp, 1);
1036 else
1037 arg1 = integer_zero_node;
1038 if (nargs > 2)
1039 arg2 = CALL_EXPR_ARG (exp, 2);
1040 else
1041 arg2 = build_int_cst (NULL_TREE, 3);
1043 /* Argument 0 is an address. */
1044 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1046 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1047 if (TREE_CODE (arg1) != INTEGER_CST)
1049 error ("second argument to %<__builtin_prefetch%> must be a constant");
1050 arg1 = integer_zero_node;
1052 op1 = expand_normal (arg1);
1053 /* Argument 1 must be either zero or one. */
1054 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1056 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1057 " using zero");
1058 op1 = const0_rtx;
1061 /* Argument 2 (locality) must be a compile-time constant int. */
1062 if (TREE_CODE (arg2) != INTEGER_CST)
1064 error ("third argument to %<__builtin_prefetch%> must be a constant");
1065 arg2 = integer_zero_node;
1067 op2 = expand_normal (arg2);
1068 /* Argument 2 must be 0, 1, 2, or 3. */
1069 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1071 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1072 op2 = const0_rtx;
1075 #ifdef HAVE_prefetch
1076 if (HAVE_prefetch)
1078 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1079 (op0,
1080 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1081 || (GET_MODE (op0) != Pmode))
1083 op0 = convert_memory_address (Pmode, op0);
1084 op0 = force_reg (Pmode, op0);
1086 emit_insn (gen_prefetch (op0, op1, op2));
1088 #endif
1090 /* Don't do anything with direct references to volatile memory, but
1091 generate code to handle other side effects. */
1092 if (!MEM_P (op0) && side_effects_p (op0))
1093 emit_insn (op0);
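/* Typical uses, for some pointer P: __builtin_prefetch (P) is a read
   prefetch with maximal locality (0 and 3 by the defaults above), while
   __builtin_prefetch (P, 1, 0) requests a write prefetch with no expected
   temporal locality.  */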
1096 /* Get a MEM rtx for expression EXP which is the address of an operand
1097 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1098 the maximum length of the block of memory that might be accessed or
1099 NULL if unknown. */
1101 static rtx
1102 get_memory_rtx (tree exp, tree len)
1104 tree orig_exp = exp;
1105 rtx addr, mem;
1106 HOST_WIDE_INT off;
1108 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1109 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1110 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1111 exp = TREE_OPERAND (exp, 0);
1113 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1114 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1116 /* Get an expression we can use to find the attributes to assign to MEM.
1117 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1118 we can. First remove any nops. */
1119 while (CONVERT_EXPR_P (exp)
1120 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1121 exp = TREE_OPERAND (exp, 0);
1123 off = 0;
1124 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1125 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1126 && host_integerp (TREE_OPERAND (exp, 1), 0)
1127 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1128 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1129 else if (TREE_CODE (exp) == ADDR_EXPR)
1130 exp = TREE_OPERAND (exp, 0);
1131 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1132 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1133 else
1134 exp = NULL;
1136 /* Honor attributes derived from exp, except for the alias set
1137 (as builtin stringops may alias with anything) and the size
1138 (as stringops may access multiple array elements). */
1139 if (exp)
1141 set_mem_attributes (mem, exp, 0);
1143 if (off)
1144 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1146 /* Allow the string and memory builtins to overflow from one
1147 field into another, see http://gcc.gnu.org/PR23561.
1148 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1149 memory accessed by the string or memory builtin will fit
1150 within the field. */
1151 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1153 tree mem_expr = MEM_EXPR (mem);
1154 HOST_WIDE_INT offset = -1, length = -1;
1155 tree inner = exp;
1157 while (TREE_CODE (inner) == ARRAY_REF
1158 || CONVERT_EXPR_P (inner)
1159 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1160 || TREE_CODE (inner) == SAVE_EXPR)
1161 inner = TREE_OPERAND (inner, 0);
1163 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1165 if (MEM_OFFSET (mem)
1166 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1167 offset = INTVAL (MEM_OFFSET (mem));
1169 if (offset >= 0 && len && host_integerp (len, 0))
1170 length = tree_low_cst (len, 0);
1172 while (TREE_CODE (inner) == COMPONENT_REF)
1174 tree field = TREE_OPERAND (inner, 1);
1175 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1176 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1178 /* Bitfields are generally not byte-addressable. */
1179 gcc_assert (!DECL_BIT_FIELD (field)
1180 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1181 % BITS_PER_UNIT) == 0
1182 && host_integerp (DECL_SIZE (field), 0)
1183 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1184 % BITS_PER_UNIT) == 0));
1186 /* If we can prove that the memory starting at XEXP (mem, 0) and
1187 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1188 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1189 fields without DECL_SIZE_UNIT like flexible array members. */
1190 if (length >= 0
1191 && DECL_SIZE_UNIT (field)
1192 && host_integerp (DECL_SIZE_UNIT (field), 0))
1194 HOST_WIDE_INT size
1195 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1196 if (offset <= size
1197 && length <= size
1198 && offset + length <= size)
1199 break;
1202 if (offset >= 0
1203 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1204 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1205 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1206 / BITS_PER_UNIT;
1207 else
1209 offset = -1;
1210 length = -1;
1213 mem_expr = TREE_OPERAND (mem_expr, 0);
1214 inner = TREE_OPERAND (inner, 0);
1217 if (mem_expr == NULL)
1218 offset = -1;
1219 if (mem_expr != MEM_EXPR (mem))
1221 set_mem_expr (mem, mem_expr);
1222 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1225 set_mem_alias_set (mem, 0);
1226 set_mem_size (mem, NULL_RTX);
1229 return mem;
1232 /* Built-in functions to perform an untyped call and return. */
1234 /* For each register that may be used for calling a function, this
1235 gives a mode used to copy the register's value. VOIDmode indicates
1236 the register is not used for calling a function. If the machine
1237 has register windows, this gives only the outbound registers.
1238 INCOMING_REGNO gives the corresponding inbound register. */
1239 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1241 /* For each register that may be used for returning values, this gives
1242 a mode used to copy the register's value. VOIDmode indicates the
1243 register is not used for returning values. If the machine has
1244 register windows, this gives only the outbound registers.
1245 INCOMING_REGNO gives the corresponding inbound register. */
1246 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1248 /* For each register that may be used for calling a function, this
1249 gives the offset of that register into the block returned by
1250 __builtin_apply_args. 0 indicates that the register is not
1251 used for calling a function. */
1252 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1254 /* Return the size required for the block returned by __builtin_apply_args,
1255 and initialize apply_args_mode. */
1257 static int
1258 apply_args_size (void)
1260 static int size = -1;
1261 int align;
1262 unsigned int regno;
1263 enum machine_mode mode;
1265 /* The values computed by this function never change. */
1266 if (size < 0)
1268 /* The first value is the incoming arg-pointer. */
1269 size = GET_MODE_SIZE (Pmode);
1271 /* The second value is the structure value address unless this is
1272 passed as an "invisible" first argument. */
1273 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1274 size += GET_MODE_SIZE (Pmode);
1276 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1277 if (FUNCTION_ARG_REGNO_P (regno))
1279 mode = reg_raw_mode[regno];
1281 gcc_assert (mode != VOIDmode);
1283 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1284 if (size % align != 0)
1285 size = CEIL (size, align) * align;
1286 apply_args_reg_offset[regno] = size;
1287 size += GET_MODE_SIZE (mode);
1288 apply_args_mode[regno] = mode;
1290 else
1292 apply_args_mode[regno] = VOIDmode;
1293 apply_args_reg_offset[regno] = 0;
1296 return size;
1299 /* Return the size required for the block returned by __builtin_apply,
1300 and initialize apply_result_mode. */
1302 static int
1303 apply_result_size (void)
1305 static int size = -1;
1306 int align, regno;
1307 enum machine_mode mode;
1309 /* The values computed by this function never change. */
1310 if (size < 0)
1312 size = 0;
1314 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1315 if (FUNCTION_VALUE_REGNO_P (regno))
1317 mode = reg_raw_mode[regno];
1319 gcc_assert (mode != VOIDmode);
1321 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1322 if (size % align != 0)
1323 size = CEIL (size, align) * align;
1324 size += GET_MODE_SIZE (mode);
1325 apply_result_mode[regno] = mode;
1327 else
1328 apply_result_mode[regno] = VOIDmode;
1330 /* Allow targets that use untyped_call and untyped_return to override
1331 the size so that machine-specific information can be stored here. */
1332 #ifdef APPLY_RESULT_SIZE
1333 size = APPLY_RESULT_SIZE;
1334 #endif
1336 return size;
1339 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1340 /* Create a vector describing the result block RESULT. If SAVEP is true,
1341 the result block is used to save the values; otherwise it is used to
1342 restore the values. */
1344 static rtx
1345 result_vector (int savep, rtx result)
1347 int regno, size, align, nelts;
1348 enum machine_mode mode;
1349 rtx reg, mem;
1350 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1352 size = nelts = 0;
1353 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1354 if ((mode = apply_result_mode[regno]) != VOIDmode)
1356 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1357 if (size % align != 0)
1358 size = CEIL (size, align) * align;
1359 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1360 mem = adjust_address (result, mode, size);
1361 savevec[nelts++] = (savep
1362 ? gen_rtx_SET (VOIDmode, mem, reg)
1363 : gen_rtx_SET (VOIDmode, reg, mem));
1364 size += GET_MODE_SIZE (mode);
1366 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1368 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1370 /* Save the state required to perform an untyped call with the same
1371 arguments as were passed to the current function. */
1373 static rtx
1374 expand_builtin_apply_args_1 (void)
1376 rtx registers, tem;
1377 int size, align, regno;
1378 enum machine_mode mode;
1379 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1381 /* Create a block where the arg-pointer, structure value address,
1382 and argument registers can be saved. */
1383 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1385 /* Walk past the arg-pointer and structure value address. */
1386 size = GET_MODE_SIZE (Pmode);
1387 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1388 size += GET_MODE_SIZE (Pmode);
1390 /* Save each register used in calling a function to the block. */
1391 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1392 if ((mode = apply_args_mode[regno]) != VOIDmode)
1394 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1395 if (size % align != 0)
1396 size = CEIL (size, align) * align;
1398 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1400 emit_move_insn (adjust_address (registers, mode, size), tem);
1401 size += GET_MODE_SIZE (mode);
1404 /* Save the arg pointer to the block. */
1405 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1406 #ifdef STACK_GROWS_DOWNWARD
1407 /* We need the arg pointer as the caller actually passed it to us, not
1408 as we might have pretended it was passed. Make sure it's a valid
1409 operand, as emit_move_insn isn't expected to handle a PLUS. */
1410 tem
1411 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1412 NULL_RTX);
1413 #endif
1414 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1416 size = GET_MODE_SIZE (Pmode);
1418 /* Save the structure value address unless this is passed as an
1419 "invisible" first argument. */
1420 if (struct_incoming_value)
1422 emit_move_insn (adjust_address (registers, Pmode, size),
1423 copy_to_reg (struct_incoming_value));
1424 size += GET_MODE_SIZE (Pmode);
1427 /* Return the address of the block. */
1428 return copy_addr_to_reg (XEXP (registers, 0));
1431 /* __builtin_apply_args returns a block of memory allocated on
1432 the stack into which is stored the arg pointer, structure
1433 value address, static chain, and all the registers that might
1434 possibly be used in performing a function call. The code is
1435 moved to the start of the function so the incoming values are
1436 saved. */
1438 static rtx
1439 expand_builtin_apply_args (void)
1441 /* Don't do __builtin_apply_args more than once in a function.
1442 Save the result of the first call and reuse it. */
1443 if (apply_args_value != 0)
1444 return apply_args_value;
1446 /* When this function is called, it means that registers must be
1447 saved on entry to this function. So we migrate the
1448 call to the first insn of this function. */
1449 rtx temp;
1450 rtx seq;
1452 start_sequence ();
1453 temp = expand_builtin_apply_args_1 ();
1454 seq = get_insns ();
1455 end_sequence ();
1457 apply_args_value = temp;
1459 /* Put the insns after the NOTE that starts the function.
1460 If this is inside a start_sequence, make the outer-level insn
1461 chain current, so the code is placed at the start of the
1462 function. If internal_arg_pointer is a non-virtual pseudo,
1463 it needs to be placed after the function that initializes
1464 that pseudo. */
1465 push_topmost_sequence ();
1466 if (REG_P (crtl->args.internal_arg_pointer)
1467 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1468 emit_insn_before (seq, parm_birth_insn);
1469 else
1470 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1471 pop_topmost_sequence ();
1472 return temp;
1476 /* Perform an untyped call and save the state required to perform an
1477 untyped return of whatever value was returned by the given function. */
1479 static rtx
1480 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1482 int size, align, regno;
1483 enum machine_mode mode;
1484 rtx incoming_args, result, reg, dest, src, call_insn;
1485 rtx old_stack_level = 0;
1486 rtx call_fusage = 0;
1487 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1489 arguments = convert_memory_address (Pmode, arguments);
1491 /* Create a block where the return registers can be saved. */
1492 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1494 /* Fetch the arg pointer from the ARGUMENTS block. */
1495 incoming_args = gen_reg_rtx (Pmode);
1496 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1497 #ifndef STACK_GROWS_DOWNWARD
1498 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1499 incoming_args, 0, OPTAB_LIB_WIDEN);
1500 #endif
1502 /* Push a new argument block and copy the arguments. Do not allow
1503 the (potential) memcpy call below to interfere with our stack
1504 manipulations. */
1505 do_pending_stack_adjust ();
1506 NO_DEFER_POP;
1508 /* Save the stack with nonlocal if available. */
1509 #ifdef HAVE_save_stack_nonlocal
1510 if (HAVE_save_stack_nonlocal)
1511 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1512 else
1513 #endif
1514 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1516 /* Allocate a block of memory onto the stack and copy the memory
1517 arguments to the outgoing arguments address. */
1518 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1520 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1521 may have already set current_function_calls_alloca to true.
1522 current_function_calls_alloca won't be set if argsize is zero,
1523 so we have to guarantee need_drap is true here. */
1524 if (SUPPORTS_STACK_ALIGNMENT)
1525 crtl->need_drap = true;
1527 dest = virtual_outgoing_args_rtx;
1528 #ifndef STACK_GROWS_DOWNWARD
1529 if (GET_CODE (argsize) == CONST_INT)
1530 dest = plus_constant (dest, -INTVAL (argsize));
1531 else
1532 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1533 #endif
1534 dest = gen_rtx_MEM (BLKmode, dest);
1535 set_mem_align (dest, PARM_BOUNDARY);
1536 src = gen_rtx_MEM (BLKmode, incoming_args);
1537 set_mem_align (src, PARM_BOUNDARY);
1538 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1540 /* Refer to the argument block. */
1541 apply_args_size ();
1542 arguments = gen_rtx_MEM (BLKmode, arguments);
1543 set_mem_align (arguments, PARM_BOUNDARY);
1545 /* Walk past the arg-pointer and structure value address. */
1546 size = GET_MODE_SIZE (Pmode);
1547 if (struct_value)
1548 size += GET_MODE_SIZE (Pmode);
1550 /* Restore each of the registers previously saved. Make USE insns
1551 for each of these registers for use in making the call. */
1552 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1553 if ((mode = apply_args_mode[regno]) != VOIDmode)
1555 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1556 if (size % align != 0)
1557 size = CEIL (size, align) * align;
1558 reg = gen_rtx_REG (mode, regno);
1559 emit_move_insn (reg, adjust_address (arguments, mode, size));
1560 use_reg (&call_fusage, reg);
1561 size += GET_MODE_SIZE (mode);
1564 /* Restore the structure value address unless this is passed as an
1565 "invisible" first argument. */
1566 size = GET_MODE_SIZE (Pmode);
1567 if (struct_value)
1569 rtx value = gen_reg_rtx (Pmode);
1570 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1571 emit_move_insn (struct_value, value);
1572 if (REG_P (struct_value))
1573 use_reg (&call_fusage, struct_value);
1574 size += GET_MODE_SIZE (Pmode);
1577 /* All arguments and registers used for the call are set up by now! */
1578 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1580 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1581 and we don't want to load it into a register as an optimization,
1582 because prepare_call_address already did it if it should be done. */
1583 if (GET_CODE (function) != SYMBOL_REF)
1584 function = memory_address (FUNCTION_MODE, function);
1586 /* Generate the actual call instruction and save the return value. */
1587 #ifdef HAVE_untyped_call
1588 if (HAVE_untyped_call)
1589 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1590 result, result_vector (1, result)));
1591 else
1592 #endif
1593 #ifdef HAVE_call_value
1594 if (HAVE_call_value)
1596 rtx valreg = 0;
1598 /* Locate the unique return register. It is not possible to
1599 express a call that sets more than one return register using
1600 call_value; use untyped_call for that. In fact, untyped_call
1601 only needs to save the return registers in the given block. */
1602 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1603 if ((mode = apply_result_mode[regno]) != VOIDmode)
1605 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1607 valreg = gen_rtx_REG (mode, regno);
1610 emit_call_insn (GEN_CALL_VALUE (valreg,
1611 gen_rtx_MEM (FUNCTION_MODE, function),
1612 const0_rtx, NULL_RTX, const0_rtx));
1614 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1616 else
1617 #endif
1618 gcc_unreachable ();
1620 /* Find the CALL insn we just emitted, and attach the register usage
1621 information. */
1622 call_insn = last_call_insn ();
1623 add_function_usage_to (call_insn, call_fusage);
1625 /* Restore the stack. */
1626 #ifdef HAVE_save_stack_nonlocal
1627 if (HAVE_save_stack_nonlocal)
1628 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1629 else
1630 #endif
1631 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1633 OK_DEFER_POP;
1635 /* Return the address of the result block. */
1636 result = copy_addr_to_reg (XEXP (result, 0));
1637 return convert_memory_address (ptr_mode, result);
1640 /* Perform an untyped return. */
1642 static void
1643 expand_builtin_return (rtx result)
1645 int size, align, regno;
1646 enum machine_mode mode;
1647 rtx reg;
1648 rtx call_fusage = 0;
1650 result = convert_memory_address (Pmode, result);
1652 apply_result_size ();
1653 result = gen_rtx_MEM (BLKmode, result);
1655 #ifdef HAVE_untyped_return
1656 if (HAVE_untyped_return)
1658 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1659 emit_barrier ();
1660 return;
1662 #endif
1664 /* Restore the return value and note that each value is used. */
1665 size = 0;
1666 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1667 if ((mode = apply_result_mode[regno]) != VOIDmode)
1669 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1670 if (size % align != 0)
1671 size = CEIL (size, align) * align;
1672 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1673 emit_move_insn (reg, adjust_address (result, mode, size));
1675 push_to_sequence (call_fusage);
1676 emit_use (reg);
1677 call_fusage = get_insns ();
1678 end_sequence ();
1679 size += GET_MODE_SIZE (mode);
1682 /* Put the USE insns before the return. */
1683 emit_insn (call_fusage);
1685 /* Return whatever values were restored by jumping directly to the end
1686 of the function. */
1687 expand_naked_return ();
1690 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1692 static enum type_class
1693 type_to_class (tree type)
1695 switch (TREE_CODE (type))
1697 case VOID_TYPE: return void_type_class;
1698 case INTEGER_TYPE: return integer_type_class;
1699 case ENUMERAL_TYPE: return enumeral_type_class;
1700 case BOOLEAN_TYPE: return boolean_type_class;
1701 case POINTER_TYPE: return pointer_type_class;
1702 case REFERENCE_TYPE: return reference_type_class;
1703 case OFFSET_TYPE: return offset_type_class;
1704 case REAL_TYPE: return real_type_class;
1705 case COMPLEX_TYPE: return complex_type_class;
1706 case FUNCTION_TYPE: return function_type_class;
1707 case METHOD_TYPE: return method_type_class;
1708 case RECORD_TYPE: return record_type_class;
1709 case UNION_TYPE:
1710 case QUAL_UNION_TYPE: return union_type_class;
1711 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1712 ? string_type_class : array_type_class);
1713 case LANG_TYPE: return lang_type_class;
1714 default: return no_type_class;
1718 /* Expand a call EXP to __builtin_classify_type. */
1720 static rtx
1721 expand_builtin_classify_type (tree exp)
1723 if (call_expr_nargs (exp))
1724 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1725 return GEN_INT (no_type_class);
1728 /* This helper macro, meant to be used in mathfn_built_in_1 below,
1729 determines which among a set of three builtin math functions is
1730 appropriate for a given type mode. The `F' and `L' cases are
1731 automatically generated from the `double' case. */
1732 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1733 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1734 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1735 fcodel = BUILT_IN_MATHFN##L ; break;
1736 /* Similar to above, but appends _R after any F/L suffix. */
1737 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1738 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1739 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1740 fcodel = BUILT_IN_MATHFN##L_R ; break;
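/* For illustration (a straightforward expansion of the macro above, not part
   of the original switch): the line CASE_MATHFN (BUILT_IN_SIN) below expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so each CASE_MATHFN line handles the double, float and long double
   variants of one math builtin.  */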
1742 /* Return the mathematical function equivalent to FN but operating directly
1743 on TYPE, if available. If IMPLICIT is true, find the function in
1744 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1745 can't do the conversion, return zero. */
1747 static tree
1748 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1750 tree const *const fn_arr
1751 = implicit ? implicit_built_in_decls : built_in_decls;
1752 enum built_in_function fcode, fcodef, fcodel;
1754 switch (fn)
1756 CASE_MATHFN (BUILT_IN_ACOS)
1757 CASE_MATHFN (BUILT_IN_ACOSH)
1758 CASE_MATHFN (BUILT_IN_ASIN)
1759 CASE_MATHFN (BUILT_IN_ASINH)
1760 CASE_MATHFN (BUILT_IN_ATAN)
1761 CASE_MATHFN (BUILT_IN_ATAN2)
1762 CASE_MATHFN (BUILT_IN_ATANH)
1763 CASE_MATHFN (BUILT_IN_CBRT)
1764 CASE_MATHFN (BUILT_IN_CEIL)
1765 CASE_MATHFN (BUILT_IN_CEXPI)
1766 CASE_MATHFN (BUILT_IN_COPYSIGN)
1767 CASE_MATHFN (BUILT_IN_COS)
1768 CASE_MATHFN (BUILT_IN_COSH)
1769 CASE_MATHFN (BUILT_IN_DREM)
1770 CASE_MATHFN (BUILT_IN_ERF)
1771 CASE_MATHFN (BUILT_IN_ERFC)
1772 CASE_MATHFN (BUILT_IN_EXP)
1773 CASE_MATHFN (BUILT_IN_EXP10)
1774 CASE_MATHFN (BUILT_IN_EXP2)
1775 CASE_MATHFN (BUILT_IN_EXPM1)
1776 CASE_MATHFN (BUILT_IN_FABS)
1777 CASE_MATHFN (BUILT_IN_FDIM)
1778 CASE_MATHFN (BUILT_IN_FLOOR)
1779 CASE_MATHFN (BUILT_IN_FMA)
1780 CASE_MATHFN (BUILT_IN_FMAX)
1781 CASE_MATHFN (BUILT_IN_FMIN)
1782 CASE_MATHFN (BUILT_IN_FMOD)
1783 CASE_MATHFN (BUILT_IN_FREXP)
1784 CASE_MATHFN (BUILT_IN_GAMMA)
1785 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1786 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1787 CASE_MATHFN (BUILT_IN_HYPOT)
1788 CASE_MATHFN (BUILT_IN_ILOGB)
1789 CASE_MATHFN (BUILT_IN_INF)
1790 CASE_MATHFN (BUILT_IN_ISINF)
1791 CASE_MATHFN (BUILT_IN_J0)
1792 CASE_MATHFN (BUILT_IN_J1)
1793 CASE_MATHFN (BUILT_IN_JN)
1794 CASE_MATHFN (BUILT_IN_LCEIL)
1795 CASE_MATHFN (BUILT_IN_LDEXP)
1796 CASE_MATHFN (BUILT_IN_LFLOOR)
1797 CASE_MATHFN (BUILT_IN_LGAMMA)
1798 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1799 CASE_MATHFN (BUILT_IN_LLCEIL)
1800 CASE_MATHFN (BUILT_IN_LLFLOOR)
1801 CASE_MATHFN (BUILT_IN_LLRINT)
1802 CASE_MATHFN (BUILT_IN_LLROUND)
1803 CASE_MATHFN (BUILT_IN_LOG)
1804 CASE_MATHFN (BUILT_IN_LOG10)
1805 CASE_MATHFN (BUILT_IN_LOG1P)
1806 CASE_MATHFN (BUILT_IN_LOG2)
1807 CASE_MATHFN (BUILT_IN_LOGB)
1808 CASE_MATHFN (BUILT_IN_LRINT)
1809 CASE_MATHFN (BUILT_IN_LROUND)
1810 CASE_MATHFN (BUILT_IN_MODF)
1811 CASE_MATHFN (BUILT_IN_NAN)
1812 CASE_MATHFN (BUILT_IN_NANS)
1813 CASE_MATHFN (BUILT_IN_NEARBYINT)
1814 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1815 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1816 CASE_MATHFN (BUILT_IN_POW)
1817 CASE_MATHFN (BUILT_IN_POWI)
1818 CASE_MATHFN (BUILT_IN_POW10)
1819 CASE_MATHFN (BUILT_IN_REMAINDER)
1820 CASE_MATHFN (BUILT_IN_REMQUO)
1821 CASE_MATHFN (BUILT_IN_RINT)
1822 CASE_MATHFN (BUILT_IN_ROUND)
1823 CASE_MATHFN (BUILT_IN_SCALB)
1824 CASE_MATHFN (BUILT_IN_SCALBLN)
1825 CASE_MATHFN (BUILT_IN_SCALBN)
1826 CASE_MATHFN (BUILT_IN_SIGNBIT)
1827 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1828 CASE_MATHFN (BUILT_IN_SIN)
1829 CASE_MATHFN (BUILT_IN_SINCOS)
1830 CASE_MATHFN (BUILT_IN_SINH)
1831 CASE_MATHFN (BUILT_IN_SQRT)
1832 CASE_MATHFN (BUILT_IN_TAN)
1833 CASE_MATHFN (BUILT_IN_TANH)
1834 CASE_MATHFN (BUILT_IN_TGAMMA)
1835 CASE_MATHFN (BUILT_IN_TRUNC)
1836 CASE_MATHFN (BUILT_IN_Y0)
1837 CASE_MATHFN (BUILT_IN_Y1)
1838 CASE_MATHFN (BUILT_IN_YN)
1840 default:
1841 return NULL_TREE;
1844 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1845 return fn_arr[fcode];
1846 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1847 return fn_arr[fcodef];
1848 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1849 return fn_arr[fcodel];
1850 else
1851 return NULL_TREE;
1854 /* Like mathfn_built_in_1(), but always use the implicit array. */
1856 tree
1857 mathfn_built_in (tree type, enum built_in_function fn)
1859 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1862 /* If errno must be maintained, expand the RTL to check if the result,
1863 TARGET, of a built-in function call, EXP, is NaN, and if so set
1864 errno to EDOM. */
1866 static void
1867 expand_errno_check (tree exp, rtx target)
1869 rtx lab = gen_label_rtx ();
1871 /* Test the result; if it is NaN, set errno=EDOM because
1872 the argument was not in the domain. */
1873 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1874 0, lab);
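/* The self-comparison above is the usual NaN test: the branch to LAB is
   taken whenever TARGET compares equal to itself, i.e. for every non-NaN
   result, so the errno-setting code below is reached only when the result
   is a NaN.  */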
1876 #ifdef TARGET_EDOM
1877 /* If this built-in doesn't throw an exception, set errno directly. */
1878 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1880 #ifdef GEN_ERRNO_RTX
1881 rtx errno_rtx = GEN_ERRNO_RTX;
1882 #else
1883 rtx errno_rtx
1884 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1885 #endif
1886 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1887 emit_label (lab);
1888 return;
1890 #endif
1892 /* Make sure the library call isn't expanded as a tail call. */
1893 CALL_EXPR_TAILCALL (exp) = 0;
1895 /* We can't set errno=EDOM directly; let the library call do it.
1896 Pop the arguments right away in case the call gets deleted. */
1897 NO_DEFER_POP;
1898 expand_call (exp, target, 0);
1899 OK_DEFER_POP;
1900 emit_label (lab);
1903 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1904 Return NULL_RTX if a normal call should be emitted rather than expanding
1905 the function in-line. EXP is the expression that is a call to the builtin
1906 function; if convenient, the result should be placed in TARGET.
1907 SUBTARGET may be used as the target for computing one of EXP's operands. */
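/* A sketch of the effect, assuming a target that provides the relevant
   optab pattern (e.g. sqrtdf2 for sqrt_optab in DFmode): with
   -fno-math-errno the call expands to that single insn; when errno
   handling is required, the NaN check from expand_errno_check is emitted
   after it, and if no pattern exists we fall back to a library call.  */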
1909 static rtx
1910 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1912 optab builtin_optab;
1913 rtx op0, insns, before_call;
1914 tree fndecl = get_callee_fndecl (exp);
1915 enum machine_mode mode;
1916 bool errno_set = false;
1917 tree arg;
1919 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1920 return NULL_RTX;
1922 arg = CALL_EXPR_ARG (exp, 0);
1924 switch (DECL_FUNCTION_CODE (fndecl))
1926 CASE_FLT_FN (BUILT_IN_SQRT):
1927 errno_set = ! tree_expr_nonnegative_p (arg);
1928 builtin_optab = sqrt_optab;
1929 break;
1930 CASE_FLT_FN (BUILT_IN_EXP):
1931 errno_set = true; builtin_optab = exp_optab; break;
1932 CASE_FLT_FN (BUILT_IN_EXP10):
1933 CASE_FLT_FN (BUILT_IN_POW10):
1934 errno_set = true; builtin_optab = exp10_optab; break;
1935 CASE_FLT_FN (BUILT_IN_EXP2):
1936 errno_set = true; builtin_optab = exp2_optab; break;
1937 CASE_FLT_FN (BUILT_IN_EXPM1):
1938 errno_set = true; builtin_optab = expm1_optab; break;
1939 CASE_FLT_FN (BUILT_IN_LOGB):
1940 errno_set = true; builtin_optab = logb_optab; break;
1941 CASE_FLT_FN (BUILT_IN_LOG):
1942 errno_set = true; builtin_optab = log_optab; break;
1943 CASE_FLT_FN (BUILT_IN_LOG10):
1944 errno_set = true; builtin_optab = log10_optab; break;
1945 CASE_FLT_FN (BUILT_IN_LOG2):
1946 errno_set = true; builtin_optab = log2_optab; break;
1947 CASE_FLT_FN (BUILT_IN_LOG1P):
1948 errno_set = true; builtin_optab = log1p_optab; break;
1949 CASE_FLT_FN (BUILT_IN_ASIN):
1950 builtin_optab = asin_optab; break;
1951 CASE_FLT_FN (BUILT_IN_ACOS):
1952 builtin_optab = acos_optab; break;
1953 CASE_FLT_FN (BUILT_IN_TAN):
1954 builtin_optab = tan_optab; break;
1955 CASE_FLT_FN (BUILT_IN_ATAN):
1956 builtin_optab = atan_optab; break;
1957 CASE_FLT_FN (BUILT_IN_FLOOR):
1958 builtin_optab = floor_optab; break;
1959 CASE_FLT_FN (BUILT_IN_CEIL):
1960 builtin_optab = ceil_optab; break;
1961 CASE_FLT_FN (BUILT_IN_TRUNC):
1962 builtin_optab = btrunc_optab; break;
1963 CASE_FLT_FN (BUILT_IN_ROUND):
1964 builtin_optab = round_optab; break;
1965 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1966 builtin_optab = nearbyint_optab;
1967 if (flag_trapping_math)
1968 break;
1969 /* Else fallthrough and expand as rint. */
1970 CASE_FLT_FN (BUILT_IN_RINT):
1971 builtin_optab = rint_optab; break;
1972 default:
1973 gcc_unreachable ();
1976 /* Make a suitable register to place result in. */
1977 mode = TYPE_MODE (TREE_TYPE (exp));
1979 if (! flag_errno_math || ! HONOR_NANS (mode))
1980 errno_set = false;
1982 /* Before working hard, check whether the instruction is available. */
1983 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1985 target = gen_reg_rtx (mode);
1987 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1988 need to expand the argument again. This way, we will not perform
1989 side-effects more than once. */
1990 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1992 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1994 start_sequence ();
1996 /* Compute into TARGET.
1997 Set TARGET to wherever the result comes back. */
1998 target = expand_unop (mode, builtin_optab, op0, target, 0);
2000 if (target != 0)
2002 if (errno_set)
2003 expand_errno_check (exp, target);
2005 /* Output the entire sequence. */
2006 insns = get_insns ();
2007 end_sequence ();
2008 emit_insn (insns);
2009 return target;
2012 /* If we were unable to expand via the builtin, stop the sequence
2013 (without outputting the insns) and call the library function
2014 with the stabilized argument list. */
2015 end_sequence ();
2018 before_call = get_last_insn ();
2020 return expand_call (exp, target, target == const0_rtx);
2023 /* Expand a call to the builtin binary math functions (pow and atan2).
2024 Return NULL_RTX if a normal call should be emitted rather than expanding the
2025 function in-line. EXP is the expression that is a call to the builtin
2026 function; if convenient, the result should be placed in TARGET.
2027 SUBTARGET may be used as the target for computing one of EXP's
2028 operands. */
2030 static rtx
2031 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2033 optab builtin_optab;
2034 rtx op0, op1, insns;
2035 int op1_type = REAL_TYPE;
2036 tree fndecl = get_callee_fndecl (exp);
2037 tree arg0, arg1;
2038 enum machine_mode mode;
2039 bool errno_set = true;
2041 switch (DECL_FUNCTION_CODE (fndecl))
2043 CASE_FLT_FN (BUILT_IN_SCALBN):
2044 CASE_FLT_FN (BUILT_IN_SCALBLN):
2045 CASE_FLT_FN (BUILT_IN_LDEXP):
2046 op1_type = INTEGER_TYPE;
2047 default:
2048 break;
2051 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2052 return NULL_RTX;
2054 arg0 = CALL_EXPR_ARG (exp, 0);
2055 arg1 = CALL_EXPR_ARG (exp, 1);
2057 switch (DECL_FUNCTION_CODE (fndecl))
2059 CASE_FLT_FN (BUILT_IN_POW):
2060 builtin_optab = pow_optab; break;
2061 CASE_FLT_FN (BUILT_IN_ATAN2):
2062 builtin_optab = atan2_optab; break;
2063 CASE_FLT_FN (BUILT_IN_SCALB):
2064 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2065 return 0;
2066 builtin_optab = scalb_optab; break;
2067 CASE_FLT_FN (BUILT_IN_SCALBN):
2068 CASE_FLT_FN (BUILT_IN_SCALBLN):
2069 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2070 return 0;
2071 /* Fall through... */
2072 CASE_FLT_FN (BUILT_IN_LDEXP):
2073 builtin_optab = ldexp_optab; break;
2074 CASE_FLT_FN (BUILT_IN_FMOD):
2075 builtin_optab = fmod_optab; break;
2076 CASE_FLT_FN (BUILT_IN_REMAINDER):
2077 CASE_FLT_FN (BUILT_IN_DREM):
2078 builtin_optab = remainder_optab; break;
2079 default:
2080 gcc_unreachable ();
2083 /* Make a suitable register to place result in. */
2084 mode = TYPE_MODE (TREE_TYPE (exp));
2086 /* Before working hard, check whether the instruction is available. */
2087 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2088 return NULL_RTX;
2090 target = gen_reg_rtx (mode);
2092 if (! flag_errno_math || ! HONOR_NANS (mode))
2093 errno_set = false;
2095 /* Always stabilize the argument list. */
2096 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2097 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2099 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2100 op1 = expand_normal (arg1);
2102 start_sequence ();
2104 /* Compute into TARGET.
2105 Set TARGET to wherever the result comes back. */
2106 target = expand_binop (mode, builtin_optab, op0, op1,
2107 target, 0, OPTAB_DIRECT);
2109 /* If we were unable to expand via the builtin, stop the sequence
2110 (without outputting the insns) and call the library function
2111 with the stabilized argument list. */
2112 if (target == 0)
2114 end_sequence ();
2115 return expand_call (exp, target, target == const0_rtx);
2118 if (errno_set)
2119 expand_errno_check (exp, target);
2121 /* Output the entire sequence. */
2122 insns = get_insns ();
2123 end_sequence ();
2124 emit_insn (insns);
2126 return target;
2129 /* Expand a call to the builtin sin and cos math functions.
2130 Return NULL_RTX if a normal call should be emitted rather than expanding the
2131 function in-line. EXP is the expression that is a call to the builtin
2132 function; if convenient, the result should be placed in TARGET.
2133 SUBTARGET may be used as the target for computing one of EXP's
2134 operands. */
2136 static rtx
2137 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2139 optab builtin_optab;
2140 rtx op0, insns;
2141 tree fndecl = get_callee_fndecl (exp);
2142 enum machine_mode mode;
2143 tree arg;
2145 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2146 return NULL_RTX;
2148 arg = CALL_EXPR_ARG (exp, 0);
2150 switch (DECL_FUNCTION_CODE (fndecl))
2152 CASE_FLT_FN (BUILT_IN_SIN):
2153 CASE_FLT_FN (BUILT_IN_COS):
2154 builtin_optab = sincos_optab; break;
2155 default:
2156 gcc_unreachable ();
2159 /* Make a suitable register to place result in. */
2160 mode = TYPE_MODE (TREE_TYPE (exp));
2162 /* Check if the sincos insn is available, otherwise fall back
2163 to the sin or cos insn. */
2164 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2165 switch (DECL_FUNCTION_CODE (fndecl))
2167 CASE_FLT_FN (BUILT_IN_SIN):
2168 builtin_optab = sin_optab; break;
2169 CASE_FLT_FN (BUILT_IN_COS):
2170 builtin_optab = cos_optab; break;
2171 default:
2172 gcc_unreachable ();
2175 /* Before working hard, check whether the instruction is available. */
2176 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2178 target = gen_reg_rtx (mode);
2180 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2181 need to expand the argument again. This way, we will not perform
2182 side-effects more than once. */
2183 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2185 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2187 start_sequence ();
2189 /* Compute into TARGET.
2190 Set TARGET to wherever the result comes back. */
2191 if (builtin_optab == sincos_optab)
2193 int result;
2195 switch (DECL_FUNCTION_CODE (fndecl))
2197 CASE_FLT_FN (BUILT_IN_SIN):
2198 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2199 break;
2200 CASE_FLT_FN (BUILT_IN_COS):
2201 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2202 break;
2203 default:
2204 gcc_unreachable ();
2206 gcc_assert (result);
2208 else
2210 target = expand_unop (mode, builtin_optab, op0, target, 0);
2213 if (target != 0)
2215 /* Output the entire sequence. */
2216 insns = get_insns ();
2217 end_sequence ();
2218 emit_insn (insns);
2219 return target;
2222 /* If we were unable to expand via the builtin, stop the sequence
2223 (without outputting the insns) and call the library function
2224 with the stabilized argument list. */
2225 end_sequence ();
2228 target = expand_call (exp, target, target == const0_rtx);
2230 return target;
2233 /* Expand a call to one of the builtin math functions that operate on
2234 a floating point argument and output an integer result (ilogb, isinf,
2235 isnan, etc).
2236 Return 0 if a normal call should be emitted rather than expanding the
2237 function in-line. EXP is the expression that is a call to the builtin
2238 function; if convenient, the result should be placed in TARGET.
2239 SUBTARGET may be used as the target for computing one of EXP's operands. */
2241 static rtx
2242 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2244 optab builtin_optab = 0;
2245 enum insn_code icode = CODE_FOR_nothing;
2246 rtx op0;
2247 tree fndecl = get_callee_fndecl (exp);
2248 enum machine_mode mode;
2249 bool errno_set = false;
2250 tree arg;
2252 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2253 return NULL_RTX;
2255 arg = CALL_EXPR_ARG (exp, 0);
2257 switch (DECL_FUNCTION_CODE (fndecl))
2259 CASE_FLT_FN (BUILT_IN_ILOGB):
2260 errno_set = true; builtin_optab = ilogb_optab; break;
2261 CASE_FLT_FN (BUILT_IN_ISINF):
2262 builtin_optab = isinf_optab; break;
2263 case BUILT_IN_ISNORMAL:
2264 case BUILT_IN_ISFINITE:
2265 CASE_FLT_FN (BUILT_IN_FINITE):
2266 /* These builtins have no optabs (yet). */
2267 break;
2268 default:
2269 gcc_unreachable ();
2272 /* There's no easy way to detect the case where we need to set EDOM. */
2273 if (flag_errno_math && errno_set)
2274 return NULL_RTX;
2276 /* Optab mode depends on the mode of the input argument. */
2277 mode = TYPE_MODE (TREE_TYPE (arg));
2279 if (builtin_optab)
2280 icode = optab_handler (builtin_optab, mode)->insn_code;
2282 /* Before working hard, check whether the instruction is available. */
2283 if (icode != CODE_FOR_nothing)
2285 /* Make a suitable register to place result in. */
2286 if (!target
2287 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2288 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2290 gcc_assert (insn_data[icode].operand[0].predicate
2291 (target, GET_MODE (target)));
2293 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2294 need to expand the argument again. This way, we will not perform
2295 side-effects more than once. */
2296 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2298 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2300 if (mode != GET_MODE (op0))
2301 op0 = convert_to_mode (mode, op0, 0);
2303 /* Compute into TARGET.
2304 Set TARGET to wherever the result comes back. */
2305 emit_unop_insn (icode, target, op0, UNKNOWN);
2306 return target;
2309 /* If there is no optab, try generic code. */
2310 switch (DECL_FUNCTION_CODE (fndecl))
2312 tree result;
2314 CASE_FLT_FN (BUILT_IN_ISINF):
2316 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2317 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2318 tree const type = TREE_TYPE (arg);
2319 REAL_VALUE_TYPE r;
2320 char buf[128];
2322 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2323 real_from_string (&r, buf);
2324 result = build_call_expr (isgr_fn, 2,
2325 fold_build1 (ABS_EXPR, type, arg),
2326 build_real (type, r));
2327 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2329 CASE_FLT_FN (BUILT_IN_FINITE):
2330 case BUILT_IN_ISFINITE:
2332 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2333 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2334 tree const type = TREE_TYPE (arg);
2335 REAL_VALUE_TYPE r;
2336 char buf[128];
2338 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2339 real_from_string (&r, buf);
2340 result = build_call_expr (isle_fn, 2,
2341 fold_build1 (ABS_EXPR, type, arg),
2342 build_real (type, r));
2343 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2345 case BUILT_IN_ISNORMAL:
2347 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2348 islessequal(fabs(x),DBL_MAX). */
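/* A concrete instance (assuming the common IEEE double format, whose
   emin is -1021): get_max_float below produces the string for DBL_MAX and
   the sprintf produces "0x1p-1022", i.e. DBL_MIN, so the generated test is
   effectively fabs (x) <= DBL_MAX & fabs (x) >= DBL_MIN.  */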
2349 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2350 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2351 tree const type = TREE_TYPE (arg);
2352 REAL_VALUE_TYPE rmax, rmin;
2353 char buf[128];
2355 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2356 real_from_string (&rmax, buf);
2357 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2358 real_from_string (&rmin, buf);
2359 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2360 result = build_call_expr (isle_fn, 2, arg,
2361 build_real (type, rmax));
2362 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2363 build_call_expr (isge_fn, 2, arg,
2364 build_real (type, rmin)));
2365 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2367 default:
2368 break;
2371 target = expand_call (exp, target, target == const0_rtx);
2373 return target;
2376 /* Expand a call to the builtin sincos math function.
2377 Return NULL_RTX if a normal call should be emitted rather than expanding the
2378 function in-line. EXP is the expression that is a call to the builtin
2379 function. */
2381 static rtx
2382 expand_builtin_sincos (tree exp)
2384 rtx op0, op1, op2, target1, target2;
2385 enum machine_mode mode;
2386 tree arg, sinp, cosp;
2387 int result;
2389 if (!validate_arglist (exp, REAL_TYPE,
2390 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2391 return NULL_RTX;
2393 arg = CALL_EXPR_ARG (exp, 0);
2394 sinp = CALL_EXPR_ARG (exp, 1);
2395 cosp = CALL_EXPR_ARG (exp, 2);
2397 /* Make a suitable register to place result in. */
2398 mode = TYPE_MODE (TREE_TYPE (arg));
2400 /* Check if sincos insn is available, otherwise emit the call. */
2401 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2402 return NULL_RTX;
2404 target1 = gen_reg_rtx (mode);
2405 target2 = gen_reg_rtx (mode);
2407 op0 = expand_normal (arg);
2408 op1 = expand_normal (build_fold_indirect_ref (sinp));
2409 op2 = expand_normal (build_fold_indirect_ref (cosp));
2411 /* Compute into target1 and target2.
2412 Set TARGET to wherever the result comes back. */
2413 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2414 gcc_assert (result);
2416 /* Move target1 and target2 to the memory locations indicated
2417 by op1 and op2. */
2418 emit_move_insn (op1, target1);
2419 emit_move_insn (op2, target2);
2421 return const0_rtx;
2424 /* Expand a call to the internal cexpi builtin to the sincos math function.
2425 EXP is the expression that is a call to the builtin function; if convenient,
2426 the result should be placed in TARGET. SUBTARGET may be used as the target
2427 for computing one of EXP's operands. */
2429 static rtx
2430 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2432 tree fndecl = get_callee_fndecl (exp);
2433 tree arg, type;
2434 enum machine_mode mode;
2435 rtx op0, op1, op2;
2437 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2438 return NULL_RTX;
2440 arg = CALL_EXPR_ARG (exp, 0);
2441 type = TREE_TYPE (arg);
2442 mode = TYPE_MODE (TREE_TYPE (arg));
2444 /* Try expanding via a sincos optab, fall back to emitting a libcall
2445 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2446 is only generated from sincos or cexp, or when either of them is available. */
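/* Note that __builtin_cexpi (X) computes exp (i*X), i.e.
   cos (X) + i*sin (X); this is why a sincos expansion, a sincos libcall,
   or a cexp libcall on the purely imaginary argument 0 + i*X (built
   further below) all produce the required value.  */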
2447 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2449 op1 = gen_reg_rtx (mode);
2450 op2 = gen_reg_rtx (mode);
2452 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2454 /* Compute into op1 and op2. */
2455 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2457 else if (TARGET_HAS_SINCOS)
2459 tree call, fn = NULL_TREE;
2460 tree top1, top2;
2461 rtx op1a, op2a;
2463 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2464 fn = built_in_decls[BUILT_IN_SINCOSF];
2465 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2466 fn = built_in_decls[BUILT_IN_SINCOS];
2467 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2468 fn = built_in_decls[BUILT_IN_SINCOSL];
2469 else
2470 gcc_unreachable ();
2472 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2473 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2474 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2475 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2476 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2477 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2479 /* Make sure not to fold the sincos call again. */
2480 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2481 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2482 call, 3, arg, top1, top2));
2484 else
2486 tree call, fn = NULL_TREE, narg;
2487 tree ctype = build_complex_type (type);
2489 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2490 fn = built_in_decls[BUILT_IN_CEXPF];
2491 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2492 fn = built_in_decls[BUILT_IN_CEXP];
2493 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2494 fn = built_in_decls[BUILT_IN_CEXPL];
2495 else
2496 gcc_unreachable ();
2498 /* If we don't have a decl for cexp, create one. This is the
2499 friendliest fallback if the user calls __builtin_cexpi
2500 on a target without full C99 math-function support. */
2501 if (fn == NULL_TREE)
2503 tree fntype;
2504 const char *name = NULL;
2506 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2507 name = "cexpf";
2508 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2509 name = "cexp";
2510 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2511 name = "cexpl";
2513 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2514 fn = build_fn_decl (name, fntype);
2517 narg = fold_build2 (COMPLEX_EXPR, ctype,
2518 build_real (type, dconst0), arg);
2520 /* Make sure not to fold the cexp call again. */
2521 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2522 return expand_expr (build_call_nary (ctype, call, 1, narg),
2523 target, VOIDmode, EXPAND_NORMAL);
2526 /* Now build the proper return type. */
2527 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2528 make_tree (TREE_TYPE (arg), op2),
2529 make_tree (TREE_TYPE (arg), op1)),
2530 target, VOIDmode, EXPAND_NORMAL);
2533 /* Expand a call to one of the builtin rounding functions gcc defines
2534 as an extension (lfloor and lceil). As these are gcc extensions we
2535 do not need to worry about setting errno to EDOM.
2536 If expanding via optab fails, lower expression to (int)(floor(x)).
2537 EXP is the expression that is a call to the builtin function;
2538 if convenient, the result should be placed in TARGET. */
2540 static rtx
2541 expand_builtin_int_roundingfn (tree exp, rtx target)
2543 convert_optab builtin_optab;
2544 rtx op0, insns, tmp;
2545 tree fndecl = get_callee_fndecl (exp);
2546 enum built_in_function fallback_fn;
2547 tree fallback_fndecl;
2548 enum machine_mode mode;
2549 tree arg;
2551 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2552 gcc_unreachable ();
2554 arg = CALL_EXPR_ARG (exp, 0);
2556 switch (DECL_FUNCTION_CODE (fndecl))
2558 CASE_FLT_FN (BUILT_IN_LCEIL):
2559 CASE_FLT_FN (BUILT_IN_LLCEIL):
2560 builtin_optab = lceil_optab;
2561 fallback_fn = BUILT_IN_CEIL;
2562 break;
2564 CASE_FLT_FN (BUILT_IN_LFLOOR):
2565 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2566 builtin_optab = lfloor_optab;
2567 fallback_fn = BUILT_IN_FLOOR;
2568 break;
2570 default:
2571 gcc_unreachable ();
2574 /* Make a suitable register to place result in. */
2575 mode = TYPE_MODE (TREE_TYPE (exp));
2577 target = gen_reg_rtx (mode);
2579 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2580 need to expand the argument again. This way, we will not perform
2581 side-effects more than once. */
2582 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2584 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2586 start_sequence ();
2588 /* Compute into TARGET. */
2589 if (expand_sfix_optab (target, op0, builtin_optab))
2591 /* Output the entire sequence. */
2592 insns = get_insns ();
2593 end_sequence ();
2594 emit_insn (insns);
2595 return target;
2598 /* If we were unable to expand via the builtin, stop the sequence
2599 (without outputting the insns). */
2600 end_sequence ();
2602 /* Fall back to floating point rounding optab. */
2603 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2605 /* For non-C99 targets we may end up without a fallback fndecl here
2606 if the user called __builtin_lfloor directly. In this case emit
2607 a call to the floor/ceil variants nevertheless. This should result
2608 in the best user experience on targets without full C99 support. */
2609 if (fallback_fndecl == NULL_TREE)
2611 tree fntype;
2612 const char *name = NULL;
2614 switch (DECL_FUNCTION_CODE (fndecl))
2616 case BUILT_IN_LCEIL:
2617 case BUILT_IN_LLCEIL:
2618 name = "ceil";
2619 break;
2620 case BUILT_IN_LCEILF:
2621 case BUILT_IN_LLCEILF:
2622 name = "ceilf";
2623 break;
2624 case BUILT_IN_LCEILL:
2625 case BUILT_IN_LLCEILL:
2626 name = "ceill";
2627 break;
2628 case BUILT_IN_LFLOOR:
2629 case BUILT_IN_LLFLOOR:
2630 name = "floor";
2631 break;
2632 case BUILT_IN_LFLOORF:
2633 case BUILT_IN_LLFLOORF:
2634 name = "floorf";
2635 break;
2636 case BUILT_IN_LFLOORL:
2637 case BUILT_IN_LLFLOORL:
2638 name = "floorl";
2639 break;
2640 default:
2641 gcc_unreachable ();
2644 fntype = build_function_type_list (TREE_TYPE (arg),
2645 TREE_TYPE (arg), NULL_TREE);
2646 fallback_fndecl = build_fn_decl (name, fntype);
2649 exp = build_call_expr (fallback_fndecl, 1, arg);
2651 tmp = expand_normal (exp);
2653 /* Truncate the result of floating point optab to integer
2654 via expand_fix (). */
2655 target = gen_reg_rtx (mode);
2656 expand_fix (target, tmp, 0);
2658 return target;
2661 /* Expand a call to one of the builtin math functions doing integer
2662 conversion (lrint).
2663 Return 0 if a normal call should be emitted rather than expanding the
2664 function in-line. EXP is the expression that is a call to the builtin
2665 function; if convenient, the result should be placed in TARGET. */
2667 static rtx
2668 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2670 convert_optab builtin_optab;
2671 rtx op0, insns;
2672 tree fndecl = get_callee_fndecl (exp);
2673 tree arg;
2674 enum machine_mode mode;
2676 /* There's no easy way to detect the case where we need to set EDOM. */
2677 if (flag_errno_math)
2678 return NULL_RTX;
2680 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2681 gcc_unreachable ();
2683 arg = CALL_EXPR_ARG (exp, 0);
2685 switch (DECL_FUNCTION_CODE (fndecl))
2687 CASE_FLT_FN (BUILT_IN_LRINT):
2688 CASE_FLT_FN (BUILT_IN_LLRINT):
2689 builtin_optab = lrint_optab; break;
2690 CASE_FLT_FN (BUILT_IN_LROUND):
2691 CASE_FLT_FN (BUILT_IN_LLROUND):
2692 builtin_optab = lround_optab; break;
2693 default:
2694 gcc_unreachable ();
2697 /* Make a suitable register to place result in. */
2698 mode = TYPE_MODE (TREE_TYPE (exp));
2700 target = gen_reg_rtx (mode);
2702 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2703 need to expand the argument again. This way, we will not perform
2704 side-effects more than once. */
2705 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2707 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2709 start_sequence ();
2711 if (expand_sfix_optab (target, op0, builtin_optab))
2713 /* Output the entire sequence. */
2714 insns = get_insns ();
2715 end_sequence ();
2716 emit_insn (insns);
2717 return target;
2720 /* If we were unable to expand via the builtin, stop the sequence
2721 (without outputting the insns) and call the library function
2722 with the stabilized argument list. */
2723 end_sequence ();
2725 target = expand_call (exp, target, target == const0_rtx);
2727 return target;
2730 /* To evaluate powi(x,n), the floating point value x raised to the
2731 constant integer exponent n, we use a hybrid algorithm that
2732 combines the "window method" with look-up tables. For an
2733 introduction to exponentiation algorithms and "addition chains",
2734 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2735 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2736 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2737 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2739 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2740 multiplications to inline before calling the system library's pow
2741 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2742 so this default never requires calling pow, powf or powl. */
2744 #ifndef POWI_MAX_MULTS
2745 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2746 #endif
2748 /* The size of the "optimal power tree" lookup table. All
2749 exponents less than this value are simply looked up in the
2750 powi_table below. This threshold is also used to size the
2751 cache of pseudo registers that hold intermediate results. */
2752 #define POWI_TABLE_SIZE 256
2754 /* The size, in bits of the window, used in the "window method"
2755 exponentiation algorithm. This is equivalent to a radix of
2756 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2757 #define POWI_WINDOW_SIZE 3
2759 /* The following table is an efficient representation of an
2760 "optimal power tree". For each value, i, the corresponding
2761 value, j, in the table states that an optimal evaluation
2762 sequence for calculating pow(x,i) can be found by evaluating
2763 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2764 100 integers is given in Knuth's "Seminumerical algorithms". */
2766 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2768 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2769 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2770 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2771 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2772 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2773 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2774 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2775 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2776 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2777 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2778 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2779 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2780 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2781 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2782 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2783 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2784 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2785 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2786 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2787 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2788 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2789 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2790 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2791 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2792 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2793 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2794 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2795 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2796 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2797 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2798 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2799 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
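/* A worked example (a sketch, not compiled here): for n == 13 the table
   gives the decomposition 13 -> 10 + 3, 10 -> 5 + 5, 5 -> 3 + 2,
   3 -> 2 + 1 and 2 -> 1 + 1, so expand_powi_1 below emits

     x2  = x  * x;
     x3  = x  * x2;
     x5  = x2 * x3;
     x10 = x5 * x5;
     x13 = x3 * x10;

   i.e. five multiplications, which is also the value powi_cost (13)
   computes.  */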
2803 /* Return the number of multiplications required to calculate
2804 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2805 subroutine of powi_cost. CACHE is an array indicating
2806 which exponents have already been calculated. */
2808 static int
2809 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2811 /* If we've already calculated this exponent, then this evaluation
2812 doesn't require any additional multiplications. */
2813 if (cache[n])
2814 return 0;
2816 cache[n] = true;
2817 return powi_lookup_cost (n - powi_table[n], cache)
2818 + powi_lookup_cost (powi_table[n], cache) + 1;
2821 /* Return the number of multiplications required to calculate
2822 powi(x,n) for an arbitrary x, given the exponent N. This
2823 function needs to be kept in sync with expand_powi below. */
2825 static int
2826 powi_cost (HOST_WIDE_INT n)
2828 bool cache[POWI_TABLE_SIZE];
2829 unsigned HOST_WIDE_INT digit;
2830 unsigned HOST_WIDE_INT val;
2831 int result;
2833 if (n == 0)
2834 return 0;
2836 /* Ignore the reciprocal when calculating the cost. */
2837 val = (n < 0) ? -n : n;
2839 /* Initialize the exponent cache. */
2840 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2841 cache[1] = true;
2843 result = 0;
2845 while (val >= POWI_TABLE_SIZE)
2847 if (val & 1)
2849 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2850 result += powi_lookup_cost (digit, cache)
2851 + POWI_WINDOW_SIZE + 1;
2852 val >>= POWI_WINDOW_SIZE;
2854 else
2856 val >>= 1;
2857 result++;
2861 return result + powi_lookup_cost (val, cache);
2864 /* Recursive subroutine of expand_powi. This function takes the array,
2865 CACHE, of already calculated exponents and an exponent N and returns
2866 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2868 static rtx
2869 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2871 unsigned HOST_WIDE_INT digit;
2872 rtx target, result;
2873 rtx op0, op1;
2875 if (n < POWI_TABLE_SIZE)
2877 if (cache[n])
2878 return cache[n];
2880 target = gen_reg_rtx (mode);
2881 cache[n] = target;
2883 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2884 op1 = expand_powi_1 (mode, powi_table[n], cache);
2886 else if (n & 1)
2888 target = gen_reg_rtx (mode);
2889 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2890 op0 = expand_powi_1 (mode, n - digit, cache);
2891 op1 = expand_powi_1 (mode, digit, cache);
2893 else
2895 target = gen_reg_rtx (mode);
2896 op0 = expand_powi_1 (mode, n >> 1, cache);
2897 op1 = op0;
2900 result = expand_mult (mode, op0, op1, target, 0);
2901 if (result != target)
2902 emit_move_insn (target, result);
2903 return target;
2906 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2907 floating point operand in mode MODE, and N is the exponent. This
2908 function needs to be kept in sync with powi_cost above. */
2910 static rtx
2911 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2913 unsigned HOST_WIDE_INT val;
2914 rtx cache[POWI_TABLE_SIZE];
2915 rtx result;
2917 if (n == 0)
2918 return CONST1_RTX (mode);
2920 val = (n < 0) ? -n : n;
2922 memset (cache, 0, sizeof (cache));
2923 cache[1] = x;
2925 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2927 /* If the original exponent was negative, reciprocate the result. */
2928 if (n < 0)
2929 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2930 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2932 return result;
2935 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2936 a normal call should be emitted rather than expanding the function
2937 in-line. EXP is the expression that is a call to the builtin
2938 function; if convenient, the result should be placed in TARGET. */
2940 static rtx
2941 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2943 tree arg0, arg1;
2944 tree fn, narg0;
2945 tree type = TREE_TYPE (exp);
2946 REAL_VALUE_TYPE cint, c, c2;
2947 HOST_WIDE_INT n;
2948 rtx op, op2;
2949 enum machine_mode mode = TYPE_MODE (type);
2951 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2952 return NULL_RTX;
2954 arg0 = CALL_EXPR_ARG (exp, 0);
2955 arg1 = CALL_EXPR_ARG (exp, 1);
2957 if (TREE_CODE (arg1) != REAL_CST
2958 || TREE_OVERFLOW (arg1))
2959 return expand_builtin_mathfn_2 (exp, target, subtarget);
2961 /* Handle constant exponents. */
2963 /* For integer valued exponents we can expand to an optimal multiplication
2964 sequence using expand_powi. */
2965 c = TREE_REAL_CST (arg1);
2966 n = real_to_integer (&c);
2967 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2968 if (real_identical (&c, &cint)
2969 && ((n >= -1 && n <= 2)
2970 || (flag_unsafe_math_optimizations
2971 && optimize_insn_for_speed_p ()
2972 && powi_cost (n) <= POWI_MAX_MULTS)))
2974 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2975 if (n != 1)
2977 op = force_reg (mode, op);
2978 op = expand_powi (op, mode, n);
2980 return op;
2983 narg0 = builtin_save_expr (arg0);
2985 /* If the exponent is not integer valued, check if it is half of an integer.
2986 In this case we can expand to sqrt (x) * x**(n/2). */
2987 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2988 if (fn != NULL_TREE)
2990 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2991 n = real_to_integer (&c2);
2992 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2993 if (real_identical (&c2, &cint)
2994 && ((flag_unsafe_math_optimizations
2995 && optimize_insn_for_speed_p ()
2996 && powi_cost (n/2) <= POWI_MAX_MULTS)
2997 || n == 1))
2999 tree call_expr = build_call_expr (fn, 1, narg0);
3000 /* Use expand_expr in case the newly built call expression
3001 was folded to a non-call. */
3002 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3003 if (n != 1)
3005 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3006 op2 = force_reg (mode, op2);
3007 op2 = expand_powi (op2, mode, abs (n / 2));
3008 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3009 0, OPTAB_LIB_WIDEN);
3010 /* If the original exponent was negative, reciprocate the
3011 result. */
3012 if (n < 0)
3013 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3014 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3016 return op;
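/* A worked example of the transformation above (the n != 1 case needs
   -funsafe-math-optimizations): for pow (x, 3.5), c2 is 7, so the
   expansion is

     op  = sqrt (x);
     op2 = expand_powi (x, 7 / 2);    -- i.e. x * x * x
     op  = op * op2;

   and a negative half-integer exponent would additionally reciprocate
   the result.  */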
3020 /* Try if the exponent is a third of an integer. In this case
3021 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3022 different from pow (x, 1./3.) due to rounding and behavior
3023 with negative x, we need to constrain this transformation to
3024 unsafe math and positive x or finite math. */
3025 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3026 if (fn != NULL_TREE
3027 && flag_unsafe_math_optimizations
3028 && (tree_expr_nonnegative_p (arg0)
3029 || !HONOR_NANS (mode)))
3031 REAL_VALUE_TYPE dconst3;
3032 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3033 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3034 real_round (&c2, mode, &c2);
3035 n = real_to_integer (&c2);
3036 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3037 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3038 real_convert (&c2, mode, &c2);
3039 if (real_identical (&c2, &c)
3040 && ((optimize_insn_for_speed_p ()
3041 && powi_cost (n/3) <= POWI_MAX_MULTS)
3042 || n == 1))
3044 tree call_expr = build_call_expr (fn, 1, narg0);
3045 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3046 if (abs (n) % 3 == 2)
3047 op = expand_simple_binop (mode, MULT, op, op, op,
3048 0, OPTAB_LIB_WIDEN);
3049 if (n != 1)
3051 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3052 op2 = force_reg (mode, op2);
3053 op2 = expand_powi (op2, mode, abs (n / 3));
3054 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3055 0, OPTAB_LIB_WIDEN);
3056 /* If the original exponent was negative, reciprocate the
3057 result. */
3058 if (n < 0)
3059 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3060 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3062 return op;
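/* A worked example of the cbrt-based transformation above: for
   pow (x, 5./3.) the rounded value of 3*c is n == 5, so the expansion is

     op  = cbrt (x);
     op  = op * op;                   -- since abs (5) % 3 == 2
     op2 = expand_powi (x, 5 / 3);    -- i.e. just x
     op  = op * op2;

   which computes x * cbrt (x) * cbrt (x) == x**(5/3).  */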
3066 /* Fall back to optab expansion. */
3067 return expand_builtin_mathfn_2 (exp, target, subtarget);
3070 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3071 a normal call should be emitted rather than expanding the function
3072 in-line. EXP is the expression that is a call to the builtin
3073 function; if convenient, the result should be placed in TARGET. */
3075 static rtx
3076 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3078 tree arg0, arg1;
3079 rtx op0, op1;
3080 enum machine_mode mode;
3081 enum machine_mode mode2;
3083 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3084 return NULL_RTX;
3086 arg0 = CALL_EXPR_ARG (exp, 0);
3087 arg1 = CALL_EXPR_ARG (exp, 1);
3088 mode = TYPE_MODE (TREE_TYPE (exp));
3090 /* Handle constant power. */
3092 if (TREE_CODE (arg1) == INTEGER_CST
3093 && !TREE_OVERFLOW (arg1))
3095 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3097 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3098 Otherwise, check the number of multiplications required. */
3099 if ((TREE_INT_CST_HIGH (arg1) == 0
3100 || TREE_INT_CST_HIGH (arg1) == -1)
3101 && ((n >= -1 && n <= 2)
3102 || (optimize_insn_for_speed_p ()
3103 && powi_cost (n) <= POWI_MAX_MULTS)))
3105 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3106 op0 = force_reg (mode, op0);
3107 return expand_powi (op0, mode, n);
3111 /* Emit a libcall to libgcc. */
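/* The libfunc obtained from powi_optab below is normally one of libgcc's
   __powisf2/__powidf2/__powixf2 helpers (an assumption about the usual
   libgcc configuration; targets may override the libfunc).  */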
3113 /* Mode of the 2nd argument must match that of an int. */
3114 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3116 if (target == NULL_RTX)
3117 target = gen_reg_rtx (mode);
3119 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3120 if (GET_MODE (op0) != mode)
3121 op0 = convert_to_mode (mode, op0, 0);
3122 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3123 if (GET_MODE (op1) != mode2)
3124 op1 = convert_to_mode (mode2, op1, 0);
3126 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3127 target, LCT_CONST, mode, 2,
3128 op0, mode, op1, mode2);
3130 return target;
3133 /* Expand expression EXP which is a call to the strlen builtin. Return
3134 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3135 try to get the result in TARGET, if convenient. */
3137 static rtx
3138 expand_builtin_strlen (tree exp, rtx target,
3139 enum machine_mode target_mode)
3141 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3142 return NULL_RTX;
3143 else
3145 rtx pat;
3146 tree len;
3147 tree src = CALL_EXPR_ARG (exp, 0);
3148 rtx result, src_reg, char_rtx, before_strlen;
3149 enum machine_mode insn_mode = target_mode, char_mode;
3150 enum insn_code icode = CODE_FOR_nothing;
3151 int align;
3153 /* If the length can be computed at compile-time, return it. */
3154 len = c_strlen (src, 0);
3155 if (len)
3156 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3158 /* If the length can be computed at compile-time and is a constant
3159 integer, but there are side-effects in src, evaluate
3160 src for side-effects, then return len.
3161 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3162 can be optimized into: i++; x = 3; */
3163 len = c_strlen (src, 1);
3164 if (len && TREE_CODE (len) == INTEGER_CST)
3166 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3167 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3170 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3172 /* If SRC is not a pointer type, don't do this operation inline. */
3173 if (align == 0)
3174 return NULL_RTX;
3176 /* Bail out if we can't compute strlen in the right mode. */
3177 while (insn_mode != VOIDmode)
3179 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3180 if (icode != CODE_FOR_nothing)
3181 break;
3183 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3185 if (insn_mode == VOIDmode)
3186 return NULL_RTX;
3188 /* Make a place to write the result of the instruction. */
3189 result = target;
3190 if (! (result != 0
3191 && REG_P (result)
3192 && GET_MODE (result) == insn_mode
3193 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3194 result = gen_reg_rtx (insn_mode);
3196 /* Make a place to hold the source address. We will not expand
3197 the actual source until we are sure that the expansion will
3198 not fail -- there are trees that cannot be expanded twice. */
3199 src_reg = gen_reg_rtx (Pmode);
3201 /* Mark the beginning of the strlen sequence so we can emit the
3202 source operand later. */
3203 before_strlen = get_last_insn ();
3205 char_rtx = const0_rtx;
3206 char_mode = insn_data[(int) icode].operand[2].mode;
3207 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3208 char_mode))
3209 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3211 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3212 char_rtx, GEN_INT (align));
3213 if (! pat)
3214 return NULL_RTX;
3215 emit_insn (pat);
3217 /* Now that we are assured of success, expand the source. */
3218 start_sequence ();
3219 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3220 if (pat != src_reg)
3221 emit_move_insn (src_reg, pat);
3222 pat = get_insns ();
3223 end_sequence ();
3225 if (before_strlen)
3226 emit_insn_after (pat, before_strlen);
3227 else
3228 emit_insn_before (pat, get_insns ());
3230 /* Return the value in the proper mode for this function. */
3231 if (GET_MODE (result) == target_mode)
3232 target = result;
3233 else if (target != 0)
3234 convert_move (target, result, 0);
3235 else
3236 target = convert_to_mode (target_mode, result, 0);
3238 return target;
3242 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed; the
3243 caller should emit a normal call, otherwise try to get the result
3244 in TARGET, if convenient (and in mode MODE if that's convenient). */
3246 static rtx
3247 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3249 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3251 tree type = TREE_TYPE (exp);
3252 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3253 CALL_EXPR_ARG (exp, 1), type);
3254 if (result)
3255 return expand_expr (result, target, mode, EXPAND_NORMAL);
3257 return NULL_RTX;
3260 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3261 caller should emit a normal call, otherwise try to get the result
3262 in TARGET, if convenient (and in mode MODE if that's convenient). */
3264 static rtx
3265 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3267 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3269 tree type = TREE_TYPE (exp);
3270 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3271 CALL_EXPR_ARG (exp, 1), type);
3272 if (result)
3273 return expand_expr (result, target, mode, EXPAND_NORMAL);
3275 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3277 return NULL_RTX;
3280 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3281 caller should emit a normal call, otherwise try to get the result
3282 in TARGET, if convenient (and in mode MODE if that's convenient). */
3284 static rtx
3285 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3287 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3289 tree type = TREE_TYPE (exp);
3290 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3291 CALL_EXPR_ARG (exp, 1), type);
3292 if (result)
3293 return expand_expr (result, target, mode, EXPAND_NORMAL);
3295 return NULL_RTX;
3298 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3299 caller should emit a normal call, otherwise try to get the result
3300 in TARGET, if convenient (and in mode MODE if that's convenient). */
3302 static rtx
3303 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3305 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3307 tree type = TREE_TYPE (exp);
3308 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3309 CALL_EXPR_ARG (exp, 1), type);
3310 if (result)
3311 return expand_expr (result, target, mode, EXPAND_NORMAL);
3313 return NULL_RTX;
3316 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3317 bytes from the constant string DATA + OFFSET and return it as a target
3318 constant. */
3320 static rtx
3321 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3322 enum machine_mode mode)
3324 const char *str = (const char *) data;
3326 gcc_assert (offset >= 0
3327 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3328 <= strlen (str) + 1));
3330 return c_readstr (str + offset, mode);
3333 /* Expand a call EXP to the memcpy builtin.
3334 Return NULL_RTX if we failed; the caller should emit a normal call,
3335 otherwise try to get the result in TARGET, if convenient (and in
3336 mode MODE if that's convenient). */
3338 static rtx
3339 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3341 tree fndecl = get_callee_fndecl (exp);
3343 if (!validate_arglist (exp,
3344 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3345 return NULL_RTX;
3346 else
3348 tree dest = CALL_EXPR_ARG (exp, 0);
3349 tree src = CALL_EXPR_ARG (exp, 1);
3350 tree len = CALL_EXPR_ARG (exp, 2);
3351 const char *src_str;
3352 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3353 unsigned int dest_align
3354 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3355 rtx dest_mem, src_mem, dest_addr, len_rtx;
3356 tree result = fold_builtin_memory_op (dest, src, len,
3357 TREE_TYPE (TREE_TYPE (fndecl)),
3358 false, /*endp=*/0);
3359 HOST_WIDE_INT expected_size = -1;
3360 unsigned int expected_align = 0;
3361 tree_ann_common_t ann;
3363 if (result)
3365 while (TREE_CODE (result) == COMPOUND_EXPR)
3367 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3368 EXPAND_NORMAL);
3369 result = TREE_OPERAND (result, 1);
3371 return expand_expr (result, target, mode, EXPAND_NORMAL);
3374 /* If DEST is not a pointer type, call the normal function. */
3375 if (dest_align == 0)
3376 return NULL_RTX;
3378 /* If SRC is not a pointer type, don't do this
3379 operation in-line. */
3380 if (src_align == 0)
3381 return NULL_RTX;
3383 ann = tree_common_ann (exp);
3384 if (ann)
3385 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3387 if (expected_align < dest_align)
3388 expected_align = dest_align;
3389 dest_mem = get_memory_rtx (dest, len);
3390 set_mem_align (dest_mem, dest_align);
3391 len_rtx = expand_normal (len);
3392 src_str = c_getstr (src);
3394 /* If SRC is a string constant and block move would be done
3395 by pieces, we can avoid loading the string from memory
3396 and only store the computed constants. */
3397 if (src_str
3398 && GET_CODE (len_rtx) == CONST_INT
3399 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3400 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3401 CONST_CAST (char *, src_str),
3402 dest_align, false))
3404 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3405 builtin_memcpy_read_str,
3406 CONST_CAST (char *, src_str),
3407 dest_align, false, 0);
3408 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3409 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3410 return dest_mem;
3413 src_mem = get_memory_rtx (src, len);
3414 set_mem_align (src_mem, src_align);
3416 /* Copy word part most expediently. */
3417 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3418 CALL_EXPR_TAILCALL (exp)
3419 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3420 expected_align, expected_size);
3422 if (dest_addr == 0)
3424 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3425 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3427 return dest_addr;
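/* Editor's note: an illustrative, stand-alone sketch (guarded out, not part
   of GCC) of what the string-constant fast path above achieves at the source
   level.  The function name below is hypothetical.  */
#if 0
#include <string.h>
#include <assert.h>

static void
example_memcpy_by_pieces (void)
{
  char buf[4];
  /* With a literal source and a constant length, the call is expanded as a
     few constant stores instead of a library call; the observable result is
     the same as this memcpy.  */
  memcpy (buf, "hi", 3);
  assert (buf[0] == 'h' && buf[1] == 'i' && buf[2] == '\0');
}
#endif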
3431 /* Expand a call EXP to the mempcpy builtin.
3432 Return NULL_RTX if we failed; the caller should emit a normal call,
3433 otherwise try to get the result in TARGET, if convenient (and in
3434 mode MODE if that's convenient). If ENDP is 0 return the
3435 destination pointer, if ENDP is 1 return the end pointer ala
3436 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3437 stpcpy. */
3439 static rtx
3440 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3442 if (!validate_arglist (exp,
3443 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3444 return NULL_RTX;
3445 else
3447 tree dest = CALL_EXPR_ARG (exp, 0);
3448 tree src = CALL_EXPR_ARG (exp, 1);
3449 tree len = CALL_EXPR_ARG (exp, 2);
3450 return expand_builtin_mempcpy_args (dest, src, len,
3451 TREE_TYPE (exp),
3452 target, mode, /*endp=*/ 1);
3456 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3457 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3458 so that this can also be called without constructing an actual CALL_EXPR.
3459 TYPE is the return type of the call. The other arguments and return value
3460 are the same as for expand_builtin_mempcpy. */
3462 static rtx
3463 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3464 rtx target, enum machine_mode mode, int endp)
3466 /* If return value is ignored, transform mempcpy into memcpy. */
3467 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3469 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3470 tree result = build_call_expr (fn, 3, dest, src, len);
3472 while (TREE_CODE (result) == COMPOUND_EXPR)
3474 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3475 EXPAND_NORMAL);
3476 result = TREE_OPERAND (result, 1);
3478 return expand_expr (result, target, mode, EXPAND_NORMAL);
3480 else
3482 const char *src_str;
3483 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3484 unsigned int dest_align
3485 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3486 rtx dest_mem, src_mem, len_rtx;
3487 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3489 if (result)
3491 while (TREE_CODE (result) == COMPOUND_EXPR)
3493 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3494 EXPAND_NORMAL);
3495 result = TREE_OPERAND (result, 1);
3497 return expand_expr (result, target, mode, EXPAND_NORMAL);
3500 /* If either SRC or DEST is not a pointer type, don't do this
3501 operation in-line. */
3502 if (dest_align == 0 || src_align == 0)
3503 return NULL_RTX;
3505 /* If LEN is not constant, call the normal function. */
3506 if (! host_integerp (len, 1))
3507 return NULL_RTX;
3509 len_rtx = expand_normal (len);
3510 src_str = c_getstr (src);
3512 /* If SRC is a string constant and block move would be done
3513 by pieces, we can avoid loading the string from memory
3514 and only store the computed constants. */
3515 if (src_str
3516 && GET_CODE (len_rtx) == CONST_INT
3517 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3518 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3519 CONST_CAST (char *, src_str),
3520 dest_align, false))
3522 dest_mem = get_memory_rtx (dest, len);
3523 set_mem_align (dest_mem, dest_align);
3524 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3525 builtin_memcpy_read_str,
3526 CONST_CAST (char *, src_str),
3527 dest_align, false, endp);
3528 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3529 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3530 return dest_mem;
3533 if (GET_CODE (len_rtx) == CONST_INT
3534 && can_move_by_pieces (INTVAL (len_rtx),
3535 MIN (dest_align, src_align)))
3537 dest_mem = get_memory_rtx (dest, len);
3538 set_mem_align (dest_mem, dest_align);
3539 src_mem = get_memory_rtx (src, len);
3540 set_mem_align (src_mem, src_align);
3541 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3542 MIN (dest_align, src_align), endp);
3543 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3544 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3545 return dest_mem;
3548 return NULL_RTX;
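/* Editor's note: illustrative sketch (guarded out, not part of GCC) of the
   three return-value conventions selected by ENDP above.  mempcpy is a GNU
   extension; the assertions assume it is available.  */
#if 0
#define _GNU_SOURCE
#include <string.h>
#include <assert.h>

static void
example_endp_conventions (void)
{
  char dst[8];
  /* endp == 0: memcpy returns DST itself.  */
  assert (memcpy (dst, "abc", 4) == dst);
  /* endp == 1: mempcpy returns one past the last byte copied.  */
  assert (mempcpy (dst, "abc", 4) == dst + 4);
  /* endp == 2: stpcpy returns the address of the copied NUL,
     i.e. the end pointer minus one.  */
  assert (stpcpy (dst, "abc") == dst + 3);
}
#endif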
3552 /* Expand expression EXP, which is a call to the memmove builtin. Return
3553 NULL_RTX if we failed; the caller should emit a normal call. */
3555 static rtx
3556 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3558 if (!validate_arglist (exp,
3559 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3560 return NULL_RTX;
3561 else
3563 tree dest = CALL_EXPR_ARG (exp, 0);
3564 tree src = CALL_EXPR_ARG (exp, 1);
3565 tree len = CALL_EXPR_ARG (exp, 2);
3566 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3567 target, mode, ignore);
3571 /* Helper function to do the actual work for expand_builtin_memmove. The
3572 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3573 so that this can also be called without constructing an actual CALL_EXPR.
3574 TYPE is the return type of the call. The other arguments and return value
3575 are the same as for expand_builtin_memmove. */
3577 static rtx
3578 expand_builtin_memmove_args (tree dest, tree src, tree len,
3579 tree type, rtx target, enum machine_mode mode,
3580 int ignore)
3582 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3584 if (result)
3586 STRIP_TYPE_NOPS (result);
3587 while (TREE_CODE (result) == COMPOUND_EXPR)
3589 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3590 EXPAND_NORMAL);
3591 result = TREE_OPERAND (result, 1);
3593 return expand_expr (result, target, mode, EXPAND_NORMAL);
3596 /* Otherwise, call the normal function. */
3597 return NULL_RTX;
3600 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3601 NULL_RTX if we failed; the caller should emit a normal call. */
3603 static rtx
3604 expand_builtin_bcopy (tree exp, int ignore)
3606 tree type = TREE_TYPE (exp);
3607 tree src, dest, size;
3609 if (!validate_arglist (exp,
3610 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3611 return NULL_RTX;
3613 src = CALL_EXPR_ARG (exp, 0);
3614 dest = CALL_EXPR_ARG (exp, 1);
3615 size = CALL_EXPR_ARG (exp, 2);
3617 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3618 This is done this way so that if it isn't expanded inline, we fall
3619 back to calling bcopy instead of memmove. */
3620 return expand_builtin_memmove_args (dest, src,
3621 fold_convert (sizetype, size),
3622 type, const0_rtx, VOIDmode,
3623 ignore);
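/* Editor's note: illustrative sketch (guarded out, not part of GCC) of the
   argument swap performed by the transformation above.  */
#if 0
#include <string.h>
#include <strings.h>
#include <assert.h>

static void
example_bcopy_as_memmove (void)
{
  char a[] = "123456";
  char b[] = "123456";
  /* bcopy (src, dst, n) is memmove (dst, src, n) with the first two
     arguments swapped; both calls below produce the same result.  */
  bcopy (a, a + 2, 4);
  memmove (b + 2, b, 4);
  assert (strcmp (a, b) == 0);
}
#endif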
3626 #ifndef HAVE_movstr
3627 # define HAVE_movstr 0
3628 # define CODE_FOR_movstr CODE_FOR_nothing
3629 #endif
3631 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3632 we failed; the caller should emit a normal call; otherwise try to
3633 get the result in TARGET, if convenient. If ENDP is 0 return the
3634 destination pointer, if ENDP is 1 return the end pointer ala
3635 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3636 stpcpy. */
3638 static rtx
3639 expand_movstr (tree dest, tree src, rtx target, int endp)
3641 rtx end;
3642 rtx dest_mem;
3643 rtx src_mem;
3644 rtx insn;
3645 const struct insn_data * data;
3647 if (!HAVE_movstr)
3648 return NULL_RTX;
3650 dest_mem = get_memory_rtx (dest, NULL);
3651 src_mem = get_memory_rtx (src, NULL);
3652 if (!endp)
3654 target = force_reg (Pmode, XEXP (dest_mem, 0));
3655 dest_mem = replace_equiv_address (dest_mem, target);
3656 end = gen_reg_rtx (Pmode);
3658 else
3660 if (target == 0 || target == const0_rtx)
3662 end = gen_reg_rtx (Pmode);
3663 if (target == 0)
3664 target = end;
3666 else
3667 end = target;
3670 data = insn_data + CODE_FOR_movstr;
3672 if (data->operand[0].mode != VOIDmode)
3673 end = gen_lowpart (data->operand[0].mode, end);
3675 insn = data->genfun (end, dest_mem, src_mem);
3677 gcc_assert (insn);
3679 emit_insn (insn);
3681 /* movstr is supposed to set end to the address of the NUL
3682 terminator. If the caller requested a mempcpy-like return value,
3683 adjust it. */
3684 if (endp == 1 && target != const0_rtx)
3686 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3687 emit_move_insn (target, force_operand (tem, NULL_RTX));
3690 return target;
3693 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3694 NULL_RTX if we failed; the caller should emit a normal call; otherwise
3695 try to get the result in TARGET, if convenient (and in mode MODE if that's
3696 convenient). */
3698 static rtx
3699 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3701 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3703 tree dest = CALL_EXPR_ARG (exp, 0);
3704 tree src = CALL_EXPR_ARG (exp, 1);
3705 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3707 return NULL_RTX;
3710 /* Helper function to do the actual work for expand_builtin_strcpy. The
3711 arguments to the builtin_strcpy call DEST and SRC are broken out
3712 so that this can also be called without constructing an actual CALL_EXPR.
3713 The other arguments and return value are the same as for
3714 expand_builtin_strcpy. */
3716 static rtx
3717 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3718 rtx target, enum machine_mode mode)
3720 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3721 if (result)
3722 return expand_expr (result, target, mode, EXPAND_NORMAL);
3723 return expand_movstr (dest, src, target, /*endp=*/0);
3727 /* Expand a call EXP to the stpcpy builtin.
3728 Return NULL_RTX if we failed; the caller should emit a normal call;
3729 otherwise try to get the result in TARGET, if convenient (and in
3730 mode MODE if that's convenient). */
3732 static rtx
3733 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3735 tree dst, src;
3737 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3738 return NULL_RTX;
3740 dst = CALL_EXPR_ARG (exp, 0);
3741 src = CALL_EXPR_ARG (exp, 1);
3743 /* If return value is ignored, transform stpcpy into strcpy. */
3744 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3746 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3747 tree result = build_call_expr (fn, 2, dst, src);
3749 STRIP_NOPS (result);
3750 while (TREE_CODE (result) == COMPOUND_EXPR)
3752 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3753 EXPAND_NORMAL);
3754 result = TREE_OPERAND (result, 1);
3756 return expand_expr (result, target, mode, EXPAND_NORMAL);
3758 else
3760 tree len, lenp1;
3761 rtx ret;
3763 /* Ensure we get an actual string whose length can be evaluated at
3764 compile-time, not an expression containing a string. This is
3765 because the latter will potentially produce pessimized code
3766 when used to produce the return value. */
3767 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3768 return expand_movstr (dst, src, target, /*endp=*/2);
3770 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3771 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3772 target, mode, /*endp=*/2);
3774 if (ret)
3775 return ret;
3777 if (TREE_CODE (len) == INTEGER_CST)
3779 rtx len_rtx = expand_normal (len);
3781 if (GET_CODE (len_rtx) == CONST_INT)
3783 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3784 dst, src, target, mode);
3786 if (ret)
3788 if (! target)
3790 if (mode != VOIDmode)
3791 target = gen_reg_rtx (mode);
3792 else
3793 target = gen_reg_rtx (GET_MODE (ret));
3795 if (GET_MODE (target) != GET_MODE (ret))
3796 ret = gen_lowpart (GET_MODE (target), ret);
3798 ret = plus_constant (ret, INTVAL (len_rtx));
3799 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3800 gcc_assert (ret);
3802 return target;
3807 return expand_movstr (dst, src, target, /*endp=*/2);
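/* Editor's note: illustrative sketch (guarded out, not part of GCC) of the
   stpcpy return value exploited above: with a known source length N, the
   result is simply DST + N, so the copy can be done via mempcpy.  */
#if 0
#define _GNU_SOURCE
#include <string.h>
#include <assert.h>

static void
example_stpcpy_return (void)
{
  char dst[16];
  const char *src = "hello";          /* strlen (src) == 5 */
  char *end = stpcpy (dst, src);
  /* stpcpy returns a pointer to the terminating NUL it wrote,
     which equals dst + strlen (src).  */
  assert (end == dst + 5 && *end == '\0');
}
#endif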
3811 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3812 bytes from constant string DATA + OFFSET and return it as target
3813 constant. */
3815 static rtx
3816 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3817 enum machine_mode mode)
3819 const char *str = (const char *) data;
3821 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3822 return const0_rtx;
3824 return c_readstr (str + offset, mode);
3827 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3828 NULL_RTX if we failed; the caller should emit a normal call. */
3830 static rtx
3831 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3833 tree fndecl = get_callee_fndecl (exp);
3835 if (validate_arglist (exp,
3836 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3838 tree dest = CALL_EXPR_ARG (exp, 0);
3839 tree src = CALL_EXPR_ARG (exp, 1);
3840 tree len = CALL_EXPR_ARG (exp, 2);
3841 tree slen = c_strlen (src, 1);
3842 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3844 if (result)
3846 while (TREE_CODE (result) == COMPOUND_EXPR)
3848 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3849 EXPAND_NORMAL);
3850 result = TREE_OPERAND (result, 1);
3852 return expand_expr (result, target, mode, EXPAND_NORMAL);
3855 /* We must be passed a constant len and src parameter. */
3856 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3857 return NULL_RTX;
3859 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3861 /* We're required to pad with trailing zeros if the requested
3862 len is greater than strlen(s2)+1. In that case try to
3863 use store_by_pieces; if it fails, punt. */
3864 if (tree_int_cst_lt (slen, len))
3866 unsigned int dest_align
3867 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3868 const char *p = c_getstr (src);
3869 rtx dest_mem;
3871 if (!p || dest_align == 0 || !host_integerp (len, 1)
3872 || !can_store_by_pieces (tree_low_cst (len, 1),
3873 builtin_strncpy_read_str,
3874 CONST_CAST (char *, p),
3875 dest_align, false))
3876 return NULL_RTX;
3878 dest_mem = get_memory_rtx (dest, len);
3879 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3880 builtin_strncpy_read_str,
3881 CONST_CAST (char *, p), dest_align, false, 0);
3882 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3883 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3884 return dest_mem;
3887 return NULL_RTX;
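/* Editor's note: illustrative sketch (guarded out, not part of GCC) of the
   zero-padding requirement handled above when LEN exceeds strlen (SRC) + 1.  */
#if 0
#include <string.h>
#include <assert.h>

static void
example_strncpy_padding (void)
{
  char dst[8];
  memset (dst, 'x', sizeof dst);
  /* strlen ("ab") + 1 == 3 < 6, so the remaining three bytes must be
     filled with '\0'; store_by_pieces emits those stores directly.  */
  strncpy (dst, "ab", 6);
  assert (dst[0] == 'a' && dst[1] == 'b');
  assert (dst[2] == 0 && dst[3] == 0 && dst[4] == 0 && dst[5] == 0);
  assert (dst[6] == 'x');             /* untouched beyond LEN */
}
#endif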
3890 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3891 bytes from constant string DATA + OFFSET and return it as target
3892 constant. */
3894 static rtx
3895 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3896 enum machine_mode mode)
3898 const char *c = (const char *) data;
3899 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3901 memset (p, *c, GET_MODE_SIZE (mode));
3903 return c_readstr (p, mode);
3906 /* Callback routine for store_by_pieces. Return the RTL of a register
3907 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3908 char value given in the RTL register data. For example, if mode is
3909 4 bytes wide, return the RTL for 0x01010101*data. */
3911 static rtx
3912 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3913 enum machine_mode mode)
3915 rtx target, coeff;
3916 size_t size;
3917 char *p;
3919 size = GET_MODE_SIZE (mode);
3920 if (size == 1)
3921 return (rtx) data;
3923 p = XALLOCAVEC (char, size);
3924 memset (p, 1, size);
3925 coeff = c_readstr (p, mode);
3927 target = convert_to_mode (mode, (rtx) data, 1);
3928 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3929 return force_reg (mode, target);
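/* Editor's note: illustrative sketch (guarded out, not part of GCC) of the
   replication trick used by builtin_memset_gen_str: multiplying the byte
   value by a mode-wide 0x01...01 constant copies it into every byte.  */
#if 0
#include <stdint.h>

static uint32_t
example_replicate_byte (unsigned char c)
{
  /* 0x01010101 * c places C in each of the four bytes, exactly what the
     generated multiply above computes for a 4-byte mode.  */
  return (uint32_t) c * 0x01010101u;
}

/* example_replicate_byte (0xAB) == 0xABABABABu  */
#endif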
3932 /* Expand expression EXP, which is a call to the memset builtin. Return
3933 NULL_RTX if we failed; the caller should emit a normal call; otherwise
3934 try to get the result in TARGET, if convenient (and in mode MODE if that's
3935 convenient). */
3937 static rtx
3938 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3940 if (!validate_arglist (exp,
3941 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3942 return NULL_RTX;
3943 else
3945 tree dest = CALL_EXPR_ARG (exp, 0);
3946 tree val = CALL_EXPR_ARG (exp, 1);
3947 tree len = CALL_EXPR_ARG (exp, 2);
3948 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3952 /* Helper function to do the actual work for expand_builtin_memset. The
3953 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3954 so that this can also be called without constructing an actual CALL_EXPR.
3955 The other arguments and return value are the same as for
3956 expand_builtin_memset. */
3958 static rtx
3959 expand_builtin_memset_args (tree dest, tree val, tree len,
3960 rtx target, enum machine_mode mode, tree orig_exp)
3962 tree fndecl, fn;
3963 enum built_in_function fcode;
3964 char c;
3965 unsigned int dest_align;
3966 rtx dest_mem, dest_addr, len_rtx;
3967 HOST_WIDE_INT expected_size = -1;
3968 unsigned int expected_align = 0;
3969 tree_ann_common_t ann;
3971 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3973 /* If DEST is not a pointer type, don't do this operation in-line. */
3974 if (dest_align == 0)
3975 return NULL_RTX;
3977 ann = tree_common_ann (orig_exp);
3978 if (ann)
3979 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3981 if (expected_align < dest_align)
3982 expected_align = dest_align;
3984 /* If the LEN parameter is zero, return DEST. */
3985 if (integer_zerop (len))
3987 /* Evaluate and ignore VAL in case it has side-effects. */
3988 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3989 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3992 /* Stabilize the arguments in case we fail. */
3993 dest = builtin_save_expr (dest);
3994 val = builtin_save_expr (val);
3995 len = builtin_save_expr (len);
3997 len_rtx = expand_normal (len);
3998 dest_mem = get_memory_rtx (dest, len);
4000 if (TREE_CODE (val) != INTEGER_CST)
4002 rtx val_rtx;
4004 val_rtx = expand_normal (val);
4005 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
4006 val_rtx, 0);
4008 /* Assume that we can memset by pieces if we can store
4009 * the coefficients by pieces (in the required modes).
4010 * We can't pass builtin_memset_gen_str as that emits RTL. */
4011 c = 1;
4012 if (host_integerp (len, 1)
4013 && can_store_by_pieces (tree_low_cst (len, 1),
4014 builtin_memset_read_str, &c, dest_align,
4015 true))
4017 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4018 val_rtx);
4019 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4020 builtin_memset_gen_str, val_rtx, dest_align,
4021 true, 0);
4023 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4024 dest_align, expected_align,
4025 expected_size))
4026 goto do_libcall;
4028 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4029 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4030 return dest_mem;
4033 if (target_char_cast (val, &c))
4034 goto do_libcall;
4036 if (c)
4038 if (host_integerp (len, 1)
4039 && can_store_by_pieces (tree_low_cst (len, 1),
4040 builtin_memset_read_str, &c, dest_align,
4041 true))
4042 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4043 builtin_memset_read_str, &c, dest_align, true, 0);
4044 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4045 dest_align, expected_align,
4046 expected_size))
4047 goto do_libcall;
4049 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4050 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4051 return dest_mem;
4054 set_mem_align (dest_mem, dest_align);
4055 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4056 CALL_EXPR_TAILCALL (orig_exp)
4057 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4058 expected_align, expected_size);
4060 if (dest_addr == 0)
4062 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4063 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4066 return dest_addr;
4068 do_libcall:
4069 fndecl = get_callee_fndecl (orig_exp);
4070 fcode = DECL_FUNCTION_CODE (fndecl);
4071 if (fcode == BUILT_IN_MEMSET)
4072 fn = build_call_expr (fndecl, 3, dest, val, len);
4073 else if (fcode == BUILT_IN_BZERO)
4074 fn = build_call_expr (fndecl, 2, dest, len);
4075 else
4076 gcc_unreachable ();
4077 if (TREE_CODE (fn) == CALL_EXPR)
4078 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4079 return expand_call (fn, target, target == const0_rtx);
4082 /* Expand expression EXP, which is a call to the bzero builtin. Return
4083 NULL_RTX if we failed; the caller should emit a normal call. */
4085 static rtx
4086 expand_builtin_bzero (tree exp)
4088 tree dest, size;
4090 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4091 return NULL_RTX;
4093 dest = CALL_EXPR_ARG (exp, 0);
4094 size = CALL_EXPR_ARG (exp, 1);
4096 /* New argument list transforming bzero(ptr x, int y) to
4097 memset(ptr x, int 0, size_t y). This is done this way
4098 so that if it isn't expanded inline, we fall back to
4099 calling bzero instead of memset. */
4101 return expand_builtin_memset_args (dest, integer_zero_node,
4102 fold_convert (sizetype, size),
4103 const0_rtx, VOIDmode, exp);
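/* Editor's note: illustrative sketch (guarded out, not part of GCC) of the
   rewrite described above: bzero (p, n) behaves as memset (p, 0, n).  */
#if 0
#include <string.h>
#include <strings.h>
#include <assert.h>

static void
example_bzero_as_memset (void)
{
  char a[4] = { 1, 2, 3, 4 };
  char b[4] = { 1, 2, 3, 4 };
  bzero (a, sizeof a);
  memset (b, 0, sizeof b);
  assert (memcmp (a, b, sizeof a) == 0);
}
#endif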
4106 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed; the
4107 caller should emit a normal call; otherwise try to get the result
4108 in TARGET, if convenient (and in mode MODE if that's convenient). */
4110 static rtx
4111 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4113 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4114 INTEGER_TYPE, VOID_TYPE))
4116 tree type = TREE_TYPE (exp);
4117 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4118 CALL_EXPR_ARG (exp, 1),
4119 CALL_EXPR_ARG (exp, 2), type);
4120 if (result)
4121 return expand_expr (result, target, mode, EXPAND_NORMAL);
4123 return NULL_RTX;
4126 /* Expand expression EXP, which is a call to the memcmp built-in function.
4127 Return NULL_RTX if we failed and the
4128 caller should emit a normal call, otherwise try to get the result in
4129 TARGET, if convenient (and in mode MODE, if that's convenient). */
4131 static rtx
4132 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4134 if (!validate_arglist (exp,
4135 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4136 return NULL_RTX;
4137 else
4139 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4140 CALL_EXPR_ARG (exp, 1),
4141 CALL_EXPR_ARG (exp, 2));
4142 if (result)
4143 return expand_expr (result, target, mode, EXPAND_NORMAL);
4146 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4148 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4149 rtx result;
4150 rtx insn;
4151 tree arg1 = CALL_EXPR_ARG (exp, 0);
4152 tree arg2 = CALL_EXPR_ARG (exp, 1);
4153 tree len = CALL_EXPR_ARG (exp, 2);
4155 int arg1_align
4156 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4157 int arg2_align
4158 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4159 enum machine_mode insn_mode;
4161 #ifdef HAVE_cmpmemsi
4162 if (HAVE_cmpmemsi)
4163 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4164 else
4165 #endif
4166 #ifdef HAVE_cmpstrnsi
4167 if (HAVE_cmpstrnsi)
4168 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4169 else
4170 #endif
4171 return NULL_RTX;
4173 /* If either argument is not a pointer, call the function. */
4174 if (arg1_align == 0 || arg2_align == 0)
4175 return NULL_RTX;
4177 /* Make a place to write the result of the instruction. */
4178 result = target;
4179 if (! (result != 0
4180 && REG_P (result) && GET_MODE (result) == insn_mode
4181 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4182 result = gen_reg_rtx (insn_mode);
4184 arg1_rtx = get_memory_rtx (arg1, len);
4185 arg2_rtx = get_memory_rtx (arg2, len);
4186 arg3_rtx = expand_normal (len);
4188 /* Set MEM_SIZE as appropriate. */
4189 if (GET_CODE (arg3_rtx) == CONST_INT)
4191 set_mem_size (arg1_rtx, arg3_rtx);
4192 set_mem_size (arg2_rtx, arg3_rtx);
4195 #ifdef HAVE_cmpmemsi
4196 if (HAVE_cmpmemsi)
4197 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4198 GEN_INT (MIN (arg1_align, arg2_align)));
4199 else
4200 #endif
4201 #ifdef HAVE_cmpstrnsi
4202 if (HAVE_cmpstrnsi)
4203 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4204 GEN_INT (MIN (arg1_align, arg2_align)));
4205 else
4206 #endif
4207 gcc_unreachable ();
4209 if (insn)
4210 emit_insn (insn);
4211 else
4212 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4213 TYPE_MODE (integer_type_node), 3,
4214 XEXP (arg1_rtx, 0), Pmode,
4215 XEXP (arg2_rtx, 0), Pmode,
4216 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4217 TYPE_UNSIGNED (sizetype)),
4218 TYPE_MODE (sizetype));
4220 /* Return the value in the proper mode for this function. */
4221 mode = TYPE_MODE (TREE_TYPE (exp));
4222 if (GET_MODE (result) == mode)
4223 return result;
4224 else if (target != 0)
4226 convert_move (target, result, 0);
4227 return target;
4229 else
4230 return convert_to_mode (mode, result, 0);
4232 #endif
4234 return NULL_RTX;
4237 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4238 if we failed; the caller should emit a normal call; otherwise try to get
4239 the result in TARGET, if convenient. */
4241 static rtx
4242 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4244 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4245 return NULL_RTX;
4246 else
4248 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4249 CALL_EXPR_ARG (exp, 1));
4250 if (result)
4251 return expand_expr (result, target, mode, EXPAND_NORMAL);
4254 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4255 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4256 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4258 rtx arg1_rtx, arg2_rtx;
4259 rtx result, insn = NULL_RTX;
4260 tree fndecl, fn;
4261 tree arg1 = CALL_EXPR_ARG (exp, 0);
4262 tree arg2 = CALL_EXPR_ARG (exp, 1);
4264 int arg1_align
4265 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4266 int arg2_align
4267 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4269 /* If either argument is not a pointer, call the function. */
4270 if (arg1_align == 0 || arg2_align == 0)
4271 return NULL_RTX;
4273 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4274 arg1 = builtin_save_expr (arg1);
4275 arg2 = builtin_save_expr (arg2);
4277 arg1_rtx = get_memory_rtx (arg1, NULL);
4278 arg2_rtx = get_memory_rtx (arg2, NULL);
4280 #ifdef HAVE_cmpstrsi
4281 /* Try to call cmpstrsi. */
4282 if (HAVE_cmpstrsi)
4284 enum machine_mode insn_mode
4285 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4287 /* Make a place to write the result of the instruction. */
4288 result = target;
4289 if (! (result != 0
4290 && REG_P (result) && GET_MODE (result) == insn_mode
4291 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4292 result = gen_reg_rtx (insn_mode);
4294 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4295 GEN_INT (MIN (arg1_align, arg2_align)));
4297 #endif
4298 #ifdef HAVE_cmpstrnsi
4299 /* Try to determine at least one length and call cmpstrnsi. */
4300 if (!insn && HAVE_cmpstrnsi)
4302 tree len;
4303 rtx arg3_rtx;
4305 enum machine_mode insn_mode
4306 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4307 tree len1 = c_strlen (arg1, 1);
4308 tree len2 = c_strlen (arg2, 1);
4310 if (len1)
4311 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4312 if (len2)
4313 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4315 /* If we don't have a constant length for the first, use the length
4316 of the second, if we know it. We don't require a constant for
4317 this case; some cost analysis could be done if both are available
4318 but neither is constant. For now, assume they're equally cheap,
4319 unless one has side effects. If both strings have constant lengths,
4320 use the smaller. */
4322 if (!len1)
4323 len = len2;
4324 else if (!len2)
4325 len = len1;
4326 else if (TREE_SIDE_EFFECTS (len1))
4327 len = len2;
4328 else if (TREE_SIDE_EFFECTS (len2))
4329 len = len1;
4330 else if (TREE_CODE (len1) != INTEGER_CST)
4331 len = len2;
4332 else if (TREE_CODE (len2) != INTEGER_CST)
4333 len = len1;
4334 else if (tree_int_cst_lt (len1, len2))
4335 len = len1;
4336 else
4337 len = len2;
4339 /* If both arguments have side effects, we cannot optimize. */
4340 if (!len || TREE_SIDE_EFFECTS (len))
4341 goto do_libcall;
4343 arg3_rtx = expand_normal (len);
4345 /* Make a place to write the result of the instruction. */
4346 result = target;
4347 if (! (result != 0
4348 && REG_P (result) && GET_MODE (result) == insn_mode
4349 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4350 result = gen_reg_rtx (insn_mode);
4352 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4353 GEN_INT (MIN (arg1_align, arg2_align)));
4355 #endif
4357 if (insn)
4359 emit_insn (insn);
4361 /* Return the value in the proper mode for this function. */
4362 mode = TYPE_MODE (TREE_TYPE (exp));
4363 if (GET_MODE (result) == mode)
4364 return result;
4365 if (target == 0)
4366 return convert_to_mode (mode, result, 0);
4367 convert_move (target, result, 0);
4368 return target;
4371 /* Expand the library call ourselves using a stabilized argument
4372 list to avoid re-evaluating the function's arguments twice. */
4373 #ifdef HAVE_cmpstrnsi
4374 do_libcall:
4375 #endif
4376 fndecl = get_callee_fndecl (exp);
4377 fn = build_call_expr (fndecl, 2, arg1, arg2);
4378 if (TREE_CODE (fn) == CALL_EXPR)
4379 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4380 return expand_call (fn, target, target == const0_rtx);
4382 #endif
4383 return NULL_RTX;
4386 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4387 NULL_RTX if we failed; the caller should emit a normal call; otherwise try to get
4388 the result in TARGET, if convenient. */
4390 static rtx
4391 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4393 if (!validate_arglist (exp,
4394 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4395 return NULL_RTX;
4396 else
4398 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4399 CALL_EXPR_ARG (exp, 1),
4400 CALL_EXPR_ARG (exp, 2));
4401 if (result)
4402 return expand_expr (result, target, mode, EXPAND_NORMAL);
4405 /* If c_strlen can determine an expression for one of the string
4406 lengths, and it doesn't have side effects, then emit cmpstrnsi
4407 using length MIN(strlen(string)+1, arg3). */
4408 #ifdef HAVE_cmpstrnsi
4409 if (HAVE_cmpstrnsi)
4411 tree len, len1, len2;
4412 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4413 rtx result, insn;
4414 tree fndecl, fn;
4415 tree arg1 = CALL_EXPR_ARG (exp, 0);
4416 tree arg2 = CALL_EXPR_ARG (exp, 1);
4417 tree arg3 = CALL_EXPR_ARG (exp, 2);
4419 int arg1_align
4420 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4421 int arg2_align
4422 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4423 enum machine_mode insn_mode
4424 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4426 len1 = c_strlen (arg1, 1);
4427 len2 = c_strlen (arg2, 1);
4429 if (len1)
4430 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4431 if (len2)
4432 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4434 /* If we don't have a constant length for the first, use the length
4435 of the second, if we know it. We don't require a constant for
4436 this case; some cost analysis could be done if both are available
4437 but neither is constant. For now, assume they're equally cheap,
4438 unless one has side effects. If both strings have constant lengths,
4439 use the smaller. */
4441 if (!len1)
4442 len = len2;
4443 else if (!len2)
4444 len = len1;
4445 else if (TREE_SIDE_EFFECTS (len1))
4446 len = len2;
4447 else if (TREE_SIDE_EFFECTS (len2))
4448 len = len1;
4449 else if (TREE_CODE (len1) != INTEGER_CST)
4450 len = len2;
4451 else if (TREE_CODE (len2) != INTEGER_CST)
4452 len = len1;
4453 else if (tree_int_cst_lt (len1, len2))
4454 len = len1;
4455 else
4456 len = len2;
4458 /* If both arguments have side effects, we cannot optimize. */
4459 if (!len || TREE_SIDE_EFFECTS (len))
4460 return NULL_RTX;
4462 /* The actual new length parameter is MIN(len,arg3). */
4463 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4464 fold_convert (TREE_TYPE (len), arg3));
4466 /* If either argument is not a pointer, call the function. */
4467 if (arg1_align == 0 || arg2_align == 0)
4468 return NULL_RTX;
4470 /* Make a place to write the result of the instruction. */
4471 result = target;
4472 if (! (result != 0
4473 && REG_P (result) && GET_MODE (result) == insn_mode
4474 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4475 result = gen_reg_rtx (insn_mode);
4477 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4478 arg1 = builtin_save_expr (arg1);
4479 arg2 = builtin_save_expr (arg2);
4480 len = builtin_save_expr (len);
4482 arg1_rtx = get_memory_rtx (arg1, len);
4483 arg2_rtx = get_memory_rtx (arg2, len);
4484 arg3_rtx = expand_normal (len);
4485 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4486 GEN_INT (MIN (arg1_align, arg2_align)));
4487 if (insn)
4489 emit_insn (insn);
4491 /* Return the value in the proper mode for this function. */
4492 mode = TYPE_MODE (TREE_TYPE (exp));
4493 if (GET_MODE (result) == mode)
4494 return result;
4495 if (target == 0)
4496 return convert_to_mode (mode, result, 0);
4497 convert_move (target, result, 0);
4498 return target;
4501 /* Expand the library call ourselves using a stabilized argument
4502 list to avoid re-evaluating the function's arguments twice. */
4503 fndecl = get_callee_fndecl (exp);
4504 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4505 if (TREE_CODE (fn) == CALL_EXPR)
4506 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4507 return expand_call (fn, target, target == const0_rtx);
4509 #endif
4510 return NULL_RTX;
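/* Editor's note: illustrative sketch (guarded out, not part of GCC) of why
   the length fed to cmpstrnsi above may be clamped to
   MIN (strlen (s) + 1, n): strncmp never compares bytes past the first NUL
   of either operand.  */
#if 0
#include <string.h>
#include <assert.h>

static void
example_strncmp_clamp (void)
{
  /* strlen ("ab") + 1 == 3, so comparing with n == 100 gives the same
     answer as comparing with n == 3.  */
  assert (strncmp ("ab", "abc", 100) < 0 && strncmp ("ab", "abc", 3) < 0);
  assert (strncmp ("ab", "ab", 100) == 0);
}
#endif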
4513 /* Expand expression EXP, which is a call to the strcat builtin.
4514 Return NULL_RTX if we failed; the caller should emit a normal call;
4515 otherwise try to get the result in TARGET, if convenient. */
4517 static rtx
4518 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4520 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4521 return NULL_RTX;
4522 else
4524 tree dst = CALL_EXPR_ARG (exp, 0);
4525 tree src = CALL_EXPR_ARG (exp, 1);
4526 const char *p = c_getstr (src);
4528 /* If the string length is zero, return the dst parameter. */
4529 if (p && *p == '\0')
4530 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4532 if (optimize_insn_for_speed_p ())
4534 /* See if we can store by pieces into (dst + strlen(dst)). */
4535 tree newsrc, newdst,
4536 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4537 rtx insns;
4539 /* Stabilize the argument list. */
4540 newsrc = builtin_save_expr (src);
4541 dst = builtin_save_expr (dst);
4543 start_sequence ();
4545 /* Create strlen (dst). */
4546 newdst = build_call_expr (strlen_fn, 1, dst);
4547 /* Create (dst p+ strlen (dst)). */
4549 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4550 newdst = builtin_save_expr (newdst);
4552 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4554 end_sequence (); /* Stop sequence. */
4555 return NULL_RTX;
4558 /* Output the entire sequence. */
4559 insns = get_insns ();
4560 end_sequence ();
4561 emit_insn (insns);
4563 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4566 return NULL_RTX;
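/* Editor's note: illustrative sketch (guarded out, not part of GCC) of the
   rewrite built above: strcat (dst, src) is expanded as a strcpy into
   dst + strlen (dst).  */
#if 0
#include <string.h>
#include <assert.h>

static void
example_strcat_as_strcpy (void)
{
  char a[16] = "foo";
  char b[16] = "foo";
  strcat (a, "bar");
  strcpy (b + strlen (b), "bar");     /* what the expansion performs */
  assert (strcmp (a, b) == 0 && strcmp (a, "foobar") == 0);
}
#endif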
4570 /* Expand expression EXP, which is a call to the strncat builtin.
4571 Return NULL_RTX if we failed; the caller should emit a normal call;
4572 otherwise try to get the result in TARGET, if convenient. */
4574 static rtx
4575 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4577 if (validate_arglist (exp,
4578 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4580 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4581 CALL_EXPR_ARG (exp, 1),
4582 CALL_EXPR_ARG (exp, 2));
4583 if (result)
4584 return expand_expr (result, target, mode, EXPAND_NORMAL);
4586 return NULL_RTX;
4589 /* Expand expression EXP, which is a call to the strspn builtin.
4590 Return NULL_RTX if we failed; the caller should emit a normal call;
4591 otherwise try to get the result in TARGET, if convenient. */
4593 static rtx
4594 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4596 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4598 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4599 CALL_EXPR_ARG (exp, 1));
4600 if (result)
4601 return expand_expr (result, target, mode, EXPAND_NORMAL);
4603 return NULL_RTX;
4606 /* Expand expression EXP, which is a call to the strcspn builtin.
4607 Return NULL_RTX if we failed; the caller should emit a normal call;
4608 otherwise try to get the result in TARGET, if convenient. */
4610 static rtx
4611 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4613 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4615 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4616 CALL_EXPR_ARG (exp, 1));
4617 if (result)
4618 return expand_expr (result, target, mode, EXPAND_NORMAL);
4620 return NULL_RTX;
4623 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4624 if that's convenient. */
4626 rtx
4627 expand_builtin_saveregs (void)
4629 rtx val, seq;
4631 /* Don't do __builtin_saveregs more than once in a function.
4632 Save the result of the first call and reuse it. */
4633 if (saveregs_value != 0)
4634 return saveregs_value;
4636 /* When this function is called, it means that registers must be
4637 saved on entry to this function. So we migrate the call to the
4638 first insn of this function. */
4640 start_sequence ();
4642 /* Do whatever the machine needs done in this case. */
4643 val = targetm.calls.expand_builtin_saveregs ();
4645 seq = get_insns ();
4646 end_sequence ();
4648 saveregs_value = val;
4650 /* Put the insns after the NOTE that starts the function. If this
4651 is inside a start_sequence, make the outer-level insn chain current, so
4652 the code is placed at the start of the function. */
4653 push_topmost_sequence ();
4654 emit_insn_after (seq, entry_of_function ());
4655 pop_topmost_sequence ();
4657 return val;
4660 /* __builtin_args_info (N) returns word N of the arg space info
4661 for the current function. The number and meanings of words
4662 is controlled by the definition of CUMULATIVE_ARGS. */
4664 static rtx
4665 expand_builtin_args_info (tree exp)
4667 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4668 int *word_ptr = (int *) &crtl->args.info;
4670 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4672 if (call_expr_nargs (exp) != 0)
4674 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4675 error ("argument of %<__builtin_args_info%> must be constant");
4676 else
4678 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4680 if (wordnum < 0 || wordnum >= nwords)
4681 error ("argument of %<__builtin_args_info%> out of range");
4682 else
4683 return GEN_INT (word_ptr[wordnum]);
4686 else
4687 error ("missing argument in %<__builtin_args_info%>");
4689 return const0_rtx;
4692 /* Expand a call to __builtin_next_arg. */
4694 static rtx
4695 expand_builtin_next_arg (void)
4697 /* Checking arguments is already done in fold_builtin_next_arg
4698 that must be called before this function. */
4699 return expand_binop (ptr_mode, add_optab,
4700 crtl->args.internal_arg_pointer,
4701 crtl->args.arg_offset_rtx,
4702 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4705 /* Make it easier for the backends by protecting the valist argument
4706 from multiple evaluations. */
4708 static tree
4709 stabilize_va_list (tree valist, int needs_lvalue)
4711 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4713 gcc_assert (vatype != NULL_TREE);
4715 if (TREE_CODE (vatype) == ARRAY_TYPE)
4717 if (TREE_SIDE_EFFECTS (valist))
4718 valist = save_expr (valist);
4720 /* For this case, the backends will be expecting a pointer to
4721 vatype, but it's possible we've actually been given an array
4722 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4723 So fix it. */
4724 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4726 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4727 valist = build_fold_addr_expr_with_type (valist, p1);
4730 else
4732 tree pt;
4734 if (! needs_lvalue)
4736 if (! TREE_SIDE_EFFECTS (valist))
4737 return valist;
4739 pt = build_pointer_type (vatype);
4740 valist = fold_build1 (ADDR_EXPR, pt, valist);
4741 TREE_SIDE_EFFECTS (valist) = 1;
4744 if (TREE_SIDE_EFFECTS (valist))
4745 valist = save_expr (valist);
4746 valist = build_fold_indirect_ref (valist);
4749 return valist;
4752 /* The "standard" definition of va_list is void*. */
4754 tree
4755 std_build_builtin_va_list (void)
4757 return ptr_type_node;
4760 /* The "standard" abi va_list is va_list_type_node. */
4762 tree
4763 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4765 return va_list_type_node;
4768 /* The "standard" type of va_list is va_list_type_node. */
4770 tree
4771 std_canonical_va_list_type (tree type)
4773 tree wtype, htype;
4775 if (INDIRECT_REF_P (type))
4776 type = TREE_TYPE (type);
4777 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4778 type = TREE_TYPE (type);
4779 wtype = va_list_type_node;
4780 htype = type;
4781 /* Treat structure va_list types. */
4782 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4783 htype = TREE_TYPE (htype);
4784 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4786 /* If va_list is an array type, the argument may have decayed
4787 to a pointer type, e.g. by being passed to another function.
4788 In that case, unwrap both types so that we can compare the
4789 underlying records. */
4790 if (TREE_CODE (htype) == ARRAY_TYPE
4791 || POINTER_TYPE_P (htype))
4793 wtype = TREE_TYPE (wtype);
4794 htype = TREE_TYPE (htype);
4797 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4798 return va_list_type_node;
4800 return NULL_TREE;
4803 /* The "standard" implementation of va_start: just assign `nextarg' to
4804 the variable. */
4806 void
4807 std_expand_builtin_va_start (tree valist, rtx nextarg)
4809 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4810 convert_move (va_r, nextarg, 0);
4813 /* Expand EXP, a call to __builtin_va_start. */
4815 static rtx
4816 expand_builtin_va_start (tree exp)
4818 rtx nextarg;
4819 tree valist;
4821 if (call_expr_nargs (exp) < 2)
4823 error ("too few arguments to function %<va_start%>");
4824 return const0_rtx;
4827 if (fold_builtin_next_arg (exp, true))
4828 return const0_rtx;
4830 nextarg = expand_builtin_next_arg ();
4831 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4833 if (targetm.expand_builtin_va_start)
4834 targetm.expand_builtin_va_start (valist, nextarg);
4835 else
4836 std_expand_builtin_va_start (valist, nextarg);
4838 return const0_rtx;
4841 /* The "standard" implementation of va_arg: read the value from the
4842 current (padded) address and increment by the (padded) size. */
4844 tree
4845 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4846 gimple_seq *post_p)
4848 tree addr, t, type_size, rounded_size, valist_tmp;
4849 unsigned HOST_WIDE_INT align, boundary;
4850 bool indirect;
4852 #ifdef ARGS_GROW_DOWNWARD
4853 /* All of the alignment and movement below is for args-grow-up machines.
4854 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4855 implement their own specialized gimplify_va_arg_expr routines. */
4856 gcc_unreachable ();
4857 #endif
4859 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4860 if (indirect)
4861 type = build_pointer_type (type);
4863 align = PARM_BOUNDARY / BITS_PER_UNIT;
4864 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4866 /* When we align parameter on stack for caller, if the parameter
4867 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4868 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4869 here with caller. */
4870 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4871 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4873 boundary /= BITS_PER_UNIT;
4875 /* Hoist the valist value into a temporary for the moment. */
4876 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4878 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4879 requires greater alignment, we must perform dynamic alignment. */
4880 if (boundary > align
4881 && !integer_zerop (TYPE_SIZE (type)))
4883 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4884 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4885 valist_tmp, size_int (boundary - 1)));
4886 gimplify_and_add (t, pre_p);
4888 t = fold_convert (sizetype, valist_tmp);
4889 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4890 fold_convert (TREE_TYPE (valist),
4891 fold_build2 (BIT_AND_EXPR, sizetype, t,
4892 size_int (-boundary))));
4893 gimplify_and_add (t, pre_p);
4895 else
4896 boundary = align;
4898 /* If the actual alignment is less than the alignment of the type,
4899 adjust the type accordingly so that we don't assume strict alignment
4900 when dereferencing the pointer. */
4901 boundary *= BITS_PER_UNIT;
4902 if (boundary < TYPE_ALIGN (type))
4904 type = build_variant_type_copy (type);
4905 TYPE_ALIGN (type) = boundary;
4908 /* Compute the rounded size of the type. */
4909 type_size = size_in_bytes (type);
4910 rounded_size = round_up (type_size, align);
4912 /* Reduce rounded_size so it's sharable with the postqueue. */
4913 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4915 /* Get AP. */
4916 addr = valist_tmp;
4917 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4919 /* Small args are padded downward. */
4920 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4921 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4922 size_binop (MINUS_EXPR, rounded_size, type_size));
4923 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4926 /* Compute new value for AP. */
4927 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4928 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4929 gimplify_and_add (t, pre_p);
4931 addr = fold_convert (build_pointer_type (type), addr);
4933 if (indirect)
4934 addr = build_va_arg_indirect_ref (addr);
4936 return build_va_arg_indirect_ref (addr);
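/* Editor's note: illustrative sketch (guarded out, not part of GCC) of the
   pointer arithmetic gimplified above, using plain integers for the va_list
   cursor.  The numbers assume a hypothetical target with 4-byte
   PARM_BOUNDARY and an 8-byte, 8-aligned argument type.  */
#if 0
#include <stdint.h>
#include <assert.h>

static void
example_va_arg_arithmetic (void)
{
  uintptr_t ap = 20;              /* current va_list cursor */
  uintptr_t align = 4;            /* PARM_BOUNDARY / BITS_PER_UNIT */
  uintptr_t boundary = 8;         /* required argument alignment */
  uintptr_t type_size = 8;        /* size of the requested type */
  uintptr_t rounded_size, addr;

  /* Dynamic alignment: ap = (ap + boundary - 1) & -boundary.  */
  if (boundary > align)
    ap = (ap + boundary - 1) & ~(boundary - 1);

  /* rounded_size = round_up (type_size, align).  */
  rounded_size = (type_size + align - 1) & ~(align - 1);

  /* Small arguments may additionally be padded downward when
     PAD_VARARGS_DOWN; that does not apply here (rounded_size > align).  */
  addr = ap;                      /* where the argument is read from */
  ap += rounded_size;             /* new value stored back into AP */

  assert (addr == 24 && ap == 32);
}
#endif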
4939 /* Build an indirect-ref expression over the given TREE, which represents a
4940 piece of a va_arg() expansion. */
4941 tree
4942 build_va_arg_indirect_ref (tree addr)
4944 addr = build_fold_indirect_ref (addr);
4946 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4947 mf_mark (addr);
4949 return addr;
4952 /* Return a dummy expression of type TYPE in order to keep going after an
4953 error. */
4955 static tree
4956 dummy_object (tree type)
4958 tree t = build_int_cst (build_pointer_type (type), 0);
4959 return build1 (INDIRECT_REF, type, t);
4962 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4963 builtin function, but a very special sort of operator. */
4965 enum gimplify_status
4966 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4968 tree promoted_type, have_va_type;
4969 tree valist = TREE_OPERAND (*expr_p, 0);
4970 tree type = TREE_TYPE (*expr_p);
4971 tree t;
4973 /* Verify that valist is of the proper type. */
4974 have_va_type = TREE_TYPE (valist);
4975 if (have_va_type == error_mark_node)
4976 return GS_ERROR;
4977 have_va_type = targetm.canonical_va_list_type (have_va_type);
4979 if (have_va_type == NULL_TREE)
4981 error ("first argument to %<va_arg%> not of type %<va_list%>");
4982 return GS_ERROR;
4985 /* Generate a diagnostic for requesting data of a type that cannot
4986 be passed through `...' due to type promotion at the call site. */
4987 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4988 != type)
4990 static bool gave_help;
4991 bool warned;
4993 /* Unfortunately, this is merely undefined, rather than a constraint
4994 violation, so we cannot make this an error. If this call is never
4995 executed, the program is still strictly conforming. */
4996 warned = warning (0, "%qT is promoted to %qT when passed through %<...%>",
4997 type, promoted_type);
4998 if (!gave_help && warned)
5000 gave_help = true;
5001 inform (input_location, "(so you should pass %qT not %qT to %<va_arg%>)",
5002 promoted_type, type);
5005 /* We can, however, treat "undefined" any way we please.
5006 Call abort to encourage the user to fix the program. */
5007 if (warned)
5008 inform (input_location, "if this code is reached, the program will abort");
5009 /* Before the abort, allow the evaluation of the va_list
5010 expression to exit or longjmp. */
5011 gimplify_and_add (valist, pre_p);
5012 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
5013 gimplify_and_add (t, pre_p);
5015 /* This is dead code, but go ahead and finish so that the
5016 mode of the result comes out right. */
5017 *expr_p = dummy_object (type);
5018 return GS_ALL_DONE;
5020 else
5022 /* Make it easier for the backends by protecting the valist argument
5023 from multiple evaluations. */
5024 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
5026 /* For this case, the backends will be expecting a pointer to
5027 TREE_TYPE (abi), but it's possible we've
5028 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5029 So fix it. */
5030 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5032 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
5033 valist = build_fold_addr_expr_with_type (valist, p1);
5036 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
5038 else
5039 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
5041 if (!targetm.gimplify_va_arg_expr)
5042 /* FIXME: Once most targets are converted we should merely
5043 assert this is non-null. */
5044 return GS_ALL_DONE;
5046 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5047 return GS_OK;
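/* Editor's note: illustrative sketch (guarded out, not part of GCC) of the
   promotion problem diagnosed above.  */
#if 0
#include <stdarg.h>

static double
example_promoted_vararg (int n, ...)
{
  va_list ap;
  double d;
  va_start (ap, n);
  /* A float argument is promoted to double when passed through "...", so it
     must be fetched as double; va_arg (ap, float) would be the undefined
     request warned about above.  */
  d = va_arg (ap, double);
  va_end (ap);
  return d;
}
#endif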
5051 /* Expand EXP, a call to __builtin_va_end. */
5053 static rtx
5054 expand_builtin_va_end (tree exp)
5056 tree valist = CALL_EXPR_ARG (exp, 0);
5058 /* Evaluate for side effects, if needed. I hate macros that don't
5059 do that. */
5060 if (TREE_SIDE_EFFECTS (valist))
5061 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5063 return const0_rtx;
5066 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5067 builtin rather than just as an assignment in stdarg.h because of the
5068 nastiness of array-type va_list types. */
5070 static rtx
5071 expand_builtin_va_copy (tree exp)
5073 tree dst, src, t;
5075 dst = CALL_EXPR_ARG (exp, 0);
5076 src = CALL_EXPR_ARG (exp, 1);
5078 dst = stabilize_va_list (dst, 1);
5079 src = stabilize_va_list (src, 0);
5081 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5083 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5085 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5086 TREE_SIDE_EFFECTS (t) = 1;
5087 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5089 else
5091 rtx dstb, srcb, size;
5093 /* Evaluate to pointers. */
5094 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5095 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5096 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5097 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5099 dstb = convert_memory_address (Pmode, dstb);
5100 srcb = convert_memory_address (Pmode, srcb);
5102 /* "Dereference" to BLKmode memories. */
5103 dstb = gen_rtx_MEM (BLKmode, dstb);
5104 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5105 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5106 srcb = gen_rtx_MEM (BLKmode, srcb);
5107 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5108 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5110 /* Copy. */
5111 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5114 return const0_rtx;
5117 /* Expand a call to one of the builtin functions __builtin_frame_address or
5118 __builtin_return_address. */
5120 static rtx
5121 expand_builtin_frame_address (tree fndecl, tree exp)
5123 /* The argument must be a nonnegative integer constant.
5124 It counts the number of frames to scan up the stack.
5125 The value is the return address saved in that frame. */
5126 if (call_expr_nargs (exp) == 0)
5127 /* Warning about missing arg was already issued. */
5128 return const0_rtx;
5129 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5131 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5132 error ("invalid argument to %<__builtin_frame_address%>");
5133 else
5134 error ("invalid argument to %<__builtin_return_address%>");
5135 return const0_rtx;
5137 else
5139 rtx tem
5140 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5141 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5143 /* Some ports cannot access arbitrary stack frames. */
5144 if (tem == NULL)
5146 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5147 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5148 else
5149 warning (0, "unsupported argument to %<__builtin_return_address%>");
5150 return const0_rtx;
5153 /* For __builtin_frame_address, return what we've got. */
5154 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5155 return tem;
5157 if (!REG_P (tem)
5158 && ! CONSTANT_P (tem))
5159 tem = copy_to_mode_reg (Pmode, tem);
5160 return tem;
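/* Editor's note: illustrative sketch (guarded out, not part of GCC).  The
   argument must be a nonnegative integer constant; 0 names the current
   frame.  */
#if 0
static void *
example_return_address (void)
{
  /* Address the current function will return to.  */
  return __builtin_return_address (0);
}
#endif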
5164 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5165 we failed and the caller should emit a normal call, otherwise try to get
5166 the result in TARGET, if convenient. */
5168 static rtx
5169 expand_builtin_alloca (tree exp, rtx target)
5171 rtx op0;
5172 rtx result;
5174 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5175 should always expand to function calls. These can be intercepted
5176 in libmudflap. */
5177 if (flag_mudflap)
5178 return NULL_RTX;
5180 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5181 return NULL_RTX;
5183 /* Compute the argument. */
5184 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5186 /* Allocate the desired space. */
5187 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5188 result = convert_memory_address (ptr_mode, result);
5190 return result;
5193 /* Expand EXP, a call to a bswap builtin. The mode to operate in is
5194 taken from the argument's type; place the result in TARGET if convenient. */
5196 static rtx
5197 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5199 enum machine_mode mode;
5200 tree arg;
5201 rtx op0;
5203 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5204 return NULL_RTX;
5206 arg = CALL_EXPR_ARG (exp, 0);
5207 mode = TYPE_MODE (TREE_TYPE (arg));
5208 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5210 target = expand_unop (mode, bswap_optab, op0, target, 1);
5212 gcc_assert (target);
5214 return convert_to_mode (mode, target, 0);
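/* Editor's note: illustrative sketch (guarded out, not part of GCC) of the
   byte swap the expansion above implements via bswap_optab.  */
#if 0
#include <assert.h>

static void
example_bswap (void)
{
  /* __builtin_bswap32/64 reverse the byte order of their operand.  */
  assert (__builtin_bswap32 (0x11223344u) == 0x44332211u);
  assert (__builtin_bswap64 (0x1122334455667788ull) == 0x8877665544332211ull);
}
#endif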
5217 /* Expand a call to a unary builtin in EXP.
5218 Return NULL_RTX if a normal call should be emitted rather than expanding the
5219 function in-line. If convenient, the result should be placed in TARGET.
5220 SUBTARGET may be used as the target for computing one of EXP's operands. */
5222 static rtx
5223 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5224 rtx subtarget, optab op_optab)
5226 rtx op0;
5228 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5229 return NULL_RTX;
5231 /* Compute the argument. */
5232 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5233 VOIDmode, EXPAND_NORMAL);
5234 /* Compute op, into TARGET if possible.
5235 Set TARGET to wherever the result comes back. */
5236 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5237 op_optab, op0, target, 1);
5238 gcc_assert (target);
5240 return convert_to_mode (target_mode, target, 0);
5243 /* If the string passed to fputs is a constant and is one character
5244 long, we attempt to transform this call into __builtin_fputc(). */
5246 static rtx
5247 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5249 /* Verify the arguments in the original call. */
5250 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5252 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5253 CALL_EXPR_ARG (exp, 1),
5254 (target == const0_rtx),
5255 unlocked, NULL_TREE);
5256 if (result)
5257 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5259 return NULL_RTX;
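/* Editor's note: illustrative sketch (guarded out, not part of GCC) of the
   transformation mentioned above for single-character constant strings.  */
#if 0
#include <stdio.h>

static void
example_fputs_to_fputc (FILE *f)
{
  /* A one-character constant string ...  */
  fputs ("x", f);
  /* ... can be emitted as the equivalent, cheaper call.  */
  fputc ('x', f);
}
#endif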
5262 /* Expand a call to __builtin_expect. We just return our argument
5263 as the builtin_expect semantics should already have been handled by
5264 the tree branch prediction pass. */
5266 static rtx
5267 expand_builtin_expect (tree exp, rtx target)
5269 tree arg, c;
5271 if (call_expr_nargs (exp) < 2)
5272 return const0_rtx;
5273 arg = CALL_EXPR_ARG (exp, 0);
5274 c = CALL_EXPR_ARG (exp, 1);
5276 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5277 /* When guessing was done, the hints should already have been stripped away. */
5278 gcc_assert (!flag_guess_branch_prob
5279 || optimize == 0 || errorcount || sorrycount);
5280 return target;
5283 void
5284 expand_builtin_trap (void)
5286 #ifdef HAVE_trap
5287 if (HAVE_trap)
5288 emit_insn (gen_trap ());
5289 else
5290 #endif
5291 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5292 emit_barrier ();
5295 /* Expand EXP, a call to fabs, fabsf or fabsl.
5296 Return NULL_RTX if a normal call should be emitted rather than expanding
5297 the function inline. If convenient, the result should be placed
5298 in TARGET. SUBTARGET may be used as the target for computing
5299 the operand. */
5301 static rtx
5302 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5304 enum machine_mode mode;
5305 tree arg;
5306 rtx op0;
5308 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5309 return NULL_RTX;
5311 arg = CALL_EXPR_ARG (exp, 0);
5312 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5313 mode = TYPE_MODE (TREE_TYPE (arg));
5314 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5315 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5318 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5319 Return NULL_RTX if a normal call should be emitted rather than expanding the
5320 function inline. If convenient, the result should be placed in TARGET.
5321 SUBTARGET may be used as the target for computing the operand. */
5323 static rtx
5324 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5326 rtx op0, op1;
5327 tree arg;
5329 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5330 return NULL_RTX;
5332 arg = CALL_EXPR_ARG (exp, 0);
5333 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5335 arg = CALL_EXPR_ARG (exp, 1);
5336 op1 = expand_normal (arg);
5338 return expand_copysign (op0, op1, target);
5341 /* Create a new constant string literal and return a char* pointer to it.
5342 The STRING_CST value is the LEN characters at STR. */
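/* For instance, with LEN counting the terminating NUL (as the printf
   expansion below does), build_string_literal (6, "hello") produces a
   char * expression equivalent to &"hello"[0].  */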
5343 tree
5344 build_string_literal (int len, const char *str)
5346 tree t, elem, index, type;
5348 t = build_string (len, str);
5349 elem = build_type_variant (char_type_node, 1, 0);
5350 index = build_index_type (size_int (len - 1));
5351 type = build_array_type (elem, index);
5352 TREE_TYPE (t) = type;
5353 TREE_CONSTANT (t) = 1;
5354 TREE_READONLY (t) = 1;
5355 TREE_STATIC (t) = 1;
5357 type = build_pointer_type (elem);
5358 t = build1 (ADDR_EXPR, type,
5359 build4 (ARRAY_REF, elem,
5360 t, integer_zero_node, NULL_TREE, NULL_TREE));
5361 return t;
5364 /* Expand EXP, a call to printf or printf_unlocked.
5365 Return NULL_RTX if a normal call should be emitted rather than transforming
5366 the function inline. If convenient, the result should be placed in
5367 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5368 call. */
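/* The transformations performed below are, at the source level, roughly:

       printf ("%s\n", s);    ->  puts (s);
       printf ("%c", c);      ->  putchar (c);
       printf ("");           ->  (nothing)
       printf ("x");          ->  putchar ('x');
       printf ("string\n");   ->  puts ("string");

   and they are applied only when the return value of printf is ignored.  */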
5369 static rtx
5370 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5371 bool unlocked)
5373 /* If we're using an unlocked function, assume the other unlocked
5374 functions exist explicitly. */
5375 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5376 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5377 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5378 : implicit_built_in_decls[BUILT_IN_PUTS];
5379 const char *fmt_str;
5380 tree fn = 0;
5381 tree fmt, arg;
5382 int nargs = call_expr_nargs (exp);
5384 /* If the return value is used, don't do the transformation. */
5385 if (target != const0_rtx)
5386 return NULL_RTX;
5388 /* Verify the required arguments in the original call. */
5389 if (nargs == 0)
5390 return NULL_RTX;
5391 fmt = CALL_EXPR_ARG (exp, 0);
5392 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5393 return NULL_RTX;
5395 /* Check whether the format is a literal string constant. */
5396 fmt_str = c_getstr (fmt);
5397 if (fmt_str == NULL)
5398 return NULL_RTX;
5400 if (!init_target_chars ())
5401 return NULL_RTX;
5403 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5404 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5406 if ((nargs != 2)
5407 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5408 return NULL_RTX;
5409 if (fn_puts)
5410 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5412 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5413 else if (strcmp (fmt_str, target_percent_c) == 0)
5415 if ((nargs != 2)
5416 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5417 return NULL_RTX;
5418 if (fn_putchar)
5419 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5421 else
5423 /* We can't handle anything else with % args or %% ... yet. */
5424 if (strchr (fmt_str, target_percent))
5425 return NULL_RTX;
5427 if (nargs > 1)
5428 return NULL_RTX;
5430 /* If the format specifier was "", printf does nothing. */
5431 if (fmt_str[0] == '\0')
5432 return const0_rtx;
5433 /* If the format specifier has a length of 1, call putchar. */
5434 if (fmt_str[1] == '\0')
5436 /* Given printf("c"), where c is any single character, convert
5437 "c"[0] to an int and pass that to the replacement function. */
5439 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5440 if (fn_putchar)
5441 fn = build_call_expr (fn_putchar, 1, arg);
5443 else
5445 /* If the format specifier was "string\n", call puts("string"). */
5446 size_t len = strlen (fmt_str);
5447 if ((unsigned char)fmt_str[len - 1] == target_newline)
5449 /* Create a NUL-terminated string that's one char shorter
5450 than the original, stripping off the trailing '\n'. */
5451 char *newstr = XALLOCAVEC (char, len);
5452 memcpy (newstr, fmt_str, len - 1);
5453 newstr[len - 1] = 0;
5454 arg = build_string_literal (len, newstr);
5455 if (fn_puts)
5456 fn = build_call_expr (fn_puts, 1, arg);
5458 else
5459 /* We'd like to arrange to call fputs(string,stdout) here,
5460 but we need stdout and don't have a way to get it yet. */
5461 return NULL_RTX;
5465 if (!fn)
5466 return NULL_RTX;
5467 if (TREE_CODE (fn) == CALL_EXPR)
5468 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5469 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5472 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5473 Return NULL_RTX if a normal call should be emitted rather than transforming
5474 the function inline. If convenient, the result should be placed in
5475 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5476 call. */
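/* The transformations performed below are, at the source level, roughly:

       fprintf (fp, "%s", s);    ->  fputs (s, fp);
       fprintf (fp, "%c", c);    ->  fputc (c, fp);
       fprintf (fp, "string");   ->  fputs ("string", fp);

   and they are applied only when the return value of fprintf is ignored.  */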
5477 static rtx
5478 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5479 bool unlocked)
5481 /* If we're using an unlocked function, assume the other unlocked
5482 functions exist explicitly. */
5483 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5484 : implicit_built_in_decls[BUILT_IN_FPUTC];
5485 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5486 : implicit_built_in_decls[BUILT_IN_FPUTS];
5487 const char *fmt_str;
5488 tree fn = 0;
5489 tree fmt, fp, arg;
5490 int nargs = call_expr_nargs (exp);
5492 /* If the return value is used, don't do the transformation. */
5493 if (target != const0_rtx)
5494 return NULL_RTX;
5496 /* Verify the required arguments in the original call. */
5497 if (nargs < 2)
5498 return NULL_RTX;
5499 fp = CALL_EXPR_ARG (exp, 0);
5500 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5501 return NULL_RTX;
5502 fmt = CALL_EXPR_ARG (exp, 1);
5503 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5504 return NULL_RTX;
5506 /* Check whether the format is a literal string constant. */
5507 fmt_str = c_getstr (fmt);
5508 if (fmt_str == NULL)
5509 return NULL_RTX;
5511 if (!init_target_chars ())
5512 return NULL_RTX;
5514 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5515 if (strcmp (fmt_str, target_percent_s) == 0)
5517 if ((nargs != 3)
5518 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5519 return NULL_RTX;
5520 arg = CALL_EXPR_ARG (exp, 2);
5521 if (fn_fputs)
5522 fn = build_call_expr (fn_fputs, 2, arg, fp);
5524 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5525 else if (strcmp (fmt_str, target_percent_c) == 0)
5527 if ((nargs != 3)
5528 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5529 return NULL_RTX;
5530 arg = CALL_EXPR_ARG (exp, 2);
5531 if (fn_fputc)
5532 fn = build_call_expr (fn_fputc, 2, arg, fp);
5534 else
5536 /* We can't handle anything else with % args or %% ... yet. */
5537 if (strchr (fmt_str, target_percent))
5538 return NULL_RTX;
5540 if (nargs > 2)
5541 return NULL_RTX;
5543 /* If the format specifier was "", fprintf does nothing. */
5544 if (fmt_str[0] == '\0')
5546 /* Evaluate and ignore FILE* argument for side-effects. */
5547 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5548 return const0_rtx;
5551 /* When "string" doesn't contain %, replace all cases of
5552 fprintf(stream,string) with fputs(string,stream). The fputs
5553 builtin will take care of special cases like length == 1. */
5554 if (fn_fputs)
5555 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5558 if (!fn)
5559 return NULL_RTX;
5560 if (TREE_CODE (fn) == CALL_EXPR)
5561 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5562 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5565 /* Expand a call EXP to sprintf. Return NULL_RTX if
5566 a normal call should be emitted rather than expanding the function
5567 inline. If convenient, the result should be placed in TARGET with
5568 mode MODE. */
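/* The transformations performed below are, at the source level, roughly:

       sprintf (dst, "string");  ->  strcpy (dst, "string"), value strlen ("string")
       sprintf (dst, "%s", s);   ->  strcpy (dst, s), value strlen (s)

   where the strlen value is materialized only when the result of
   sprintf is actually used.  */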
5570 static rtx
5571 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5573 tree dest, fmt;
5574 const char *fmt_str;
5575 int nargs = call_expr_nargs (exp);
5577 /* Verify the required arguments in the original call. */
5578 if (nargs < 2)
5579 return NULL_RTX;
5580 dest = CALL_EXPR_ARG (exp, 0);
5581 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5582 return NULL_RTX;
5583 fmt = CALL_EXPR_ARG (exp, 1);
5584 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5585 return NULL_RTX;
5587 /* Check whether the format is a literal string constant. */
5588 fmt_str = c_getstr (fmt);
5589 if (fmt_str == NULL)
5590 return NULL_RTX;
5592 if (!init_target_chars ())
5593 return NULL_RTX;
5595 /* If the format doesn't contain % args or %%, use strcpy. */
5596 if (strchr (fmt_str, target_percent) == 0)
5598 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5599 tree exp;
5601 if ((nargs > 2) || ! fn)
5602 return NULL_RTX;
5603 expand_expr (build_call_expr (fn, 2, dest, fmt),
5604 const0_rtx, VOIDmode, EXPAND_NORMAL);
5605 if (target == const0_rtx)
5606 return const0_rtx;
5607 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5608 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5610 /* If the format is "%s", use strcpy; the argument's length is needed only if the result is used. */
5611 else if (strcmp (fmt_str, target_percent_s) == 0)
5613 tree fn, arg, len;
5614 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5616 if (! fn)
5617 return NULL_RTX;
5618 if (nargs != 3)
5619 return NULL_RTX;
5620 arg = CALL_EXPR_ARG (exp, 2);
5621 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5622 return NULL_RTX;
5624 if (target != const0_rtx)
5626 len = c_strlen (arg, 1);
5627 if (! len || TREE_CODE (len) != INTEGER_CST)
5628 return NULL_RTX;
5630 else
5631 len = NULL_TREE;
5633 expand_expr (build_call_expr (fn, 2, dest, arg),
5634 const0_rtx, VOIDmode, EXPAND_NORMAL);
5636 if (target == const0_rtx)
5637 return const0_rtx;
5638 return expand_expr (len, target, mode, EXPAND_NORMAL);
5641 return NULL_RTX;
5644 /* Expand a call to either the entry or exit function profiler. */
5646 static rtx
5647 expand_builtin_profile_func (bool exitp)
5649 rtx this_rtx, which;
5651 this_rtx = DECL_RTL (current_function_decl);
5652 gcc_assert (MEM_P (this_rtx));
5653 this_rtx = XEXP (this_rtx, 0);
5655 if (exitp)
5656 which = profile_function_exit_libfunc;
5657 else
5658 which = profile_function_entry_libfunc;
5660 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5661 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5663 Pmode);
5665 return const0_rtx;
5668 /* Expand a call to __builtin___clear_cache. */
5670 static rtx
5671 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5673 #ifndef HAVE_clear_cache
5674 #ifdef CLEAR_INSN_CACHE
5675 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5676 does something. Just do the default expansion to a call to
5677 __clear_cache(). */
5678 return NULL_RTX;
5679 #else
5680 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5681 does nothing. There is no need to call it. Do nothing. */
5682 return const0_rtx;
5683 #endif /* CLEAR_INSN_CACHE */
5684 #else
5685 /* We have a "clear_cache" insn, and it will handle everything. */
5686 tree begin, end;
5687 rtx begin_rtx, end_rtx;
5688 enum insn_code icode;
5690 /* We must not expand to a library call. If we did, any
5691 fallback library function in libgcc that might contain a call to
5692 __builtin___clear_cache() would recurse infinitely. */
5693 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5695 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5696 return const0_rtx;
5699 if (HAVE_clear_cache)
5701 icode = CODE_FOR_clear_cache;
5703 begin = CALL_EXPR_ARG (exp, 0);
5704 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5705 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5706 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5707 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5709 end = CALL_EXPR_ARG (exp, 1);
5710 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5711 end_rtx = convert_memory_address (Pmode, end_rtx);
5712 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5713 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5715 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5717 return const0_rtx;
5718 #endif /* HAVE_clear_cache */
5721 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
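/* For example, if TRAMPOLINE_ALIGNMENT were 64 bits, the address would be
   rounded up as

       tramp = (tramp + 7) & ~7;

   which is what the PLUS/AND sequence emitted below computes.  */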
5723 static rtx
5724 round_trampoline_addr (rtx tramp)
5726 rtx temp, addend, mask;
5728 /* If we don't need too much alignment, we'll have been guaranteed
5729 proper alignment by get_trampoline_type. */
5730 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5731 return tramp;
5733 /* Round address up to desired boundary. */
5734 temp = gen_reg_rtx (Pmode);
5735 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5736 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5738 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5739 temp, 0, OPTAB_LIB_WIDEN);
5740 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5741 temp, 0, OPTAB_LIB_WIDEN);
5743 return tramp;
5746 static rtx
5747 expand_builtin_init_trampoline (tree exp)
5749 tree t_tramp, t_func, t_chain;
5750 rtx r_tramp, r_func, r_chain;
5751 #ifdef TRAMPOLINE_TEMPLATE
5752 rtx blktramp;
5753 #endif
5755 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5756 POINTER_TYPE, VOID_TYPE))
5757 return NULL_RTX;
5759 t_tramp = CALL_EXPR_ARG (exp, 0);
5760 t_func = CALL_EXPR_ARG (exp, 1);
5761 t_chain = CALL_EXPR_ARG (exp, 2);
5763 r_tramp = expand_normal (t_tramp);
5764 r_func = expand_normal (t_func);
5765 r_chain = expand_normal (t_chain);
5767 /* Generate insns to initialize the trampoline. */
5768 r_tramp = round_trampoline_addr (r_tramp);
5769 #ifdef TRAMPOLINE_TEMPLATE
5770 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5771 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5772 emit_block_move (blktramp, assemble_trampoline_template (),
5773 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5774 #endif
5775 trampolines_created = 1;
5776 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5778 return const0_rtx;
5781 static rtx
5782 expand_builtin_adjust_trampoline (tree exp)
5784 rtx tramp;
5786 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5787 return NULL_RTX;
5789 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5790 tramp = round_trampoline_addr (tramp);
5791 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5792 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5793 #endif
5795 return tramp;
5798 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5799 function. The function first checks whether the back end provides
5800 an insn to implement signbit for the respective mode. If not, it
5801 checks whether the floating point format of the value is such that
5802 the sign bit can be extracted. If that is not the case, the
5803 function returns NULL_RTX to indicate that a normal call should be
5804 emitted rather than expanding the function in-line. EXP is the
5805 expression that is a call to the builtin function; if convenient,
5806 the result should be placed in TARGET. */
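/* For IEEE double, for instance, the sign is the most significant bit of
   the 64-bit image, so on a 64-bit target the expansion is roughly

       (bits >> 63) & 1

   or a single AND when the bit already falls within the result mode, as
   implemented below.  */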
5807 static rtx
5808 expand_builtin_signbit (tree exp, rtx target)
5810 const struct real_format *fmt;
5811 enum machine_mode fmode, imode, rmode;
5812 HOST_WIDE_INT hi, lo;
5813 tree arg;
5814 int word, bitpos;
5815 enum insn_code icode;
5816 rtx temp;
5818 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5819 return NULL_RTX;
5821 arg = CALL_EXPR_ARG (exp, 0);
5822 fmode = TYPE_MODE (TREE_TYPE (arg));
5823 rmode = TYPE_MODE (TREE_TYPE (exp));
5824 fmt = REAL_MODE_FORMAT (fmode);
5826 arg = builtin_save_expr (arg);
5828 /* Expand the argument, yielding an RTX expression. */
5829 temp = expand_normal (arg);
5831 /* Check if the back end provides an insn that handles signbit for the
5832 argument's mode. */
5833 icode = signbit_optab->handlers [(int) fmode].insn_code;
5834 if (icode != CODE_FOR_nothing)
5836 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5837 emit_unop_insn (icode, target, temp, UNKNOWN);
5838 return target;
5841 /* For floating point formats without a sign bit, implement signbit
5842 as "ARG < 0.0". */
5843 bitpos = fmt->signbit_ro;
5844 if (bitpos < 0)
5846 /* But we can't do this if the format supports signed zero. */
5847 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5848 return NULL_RTX;
5850 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5851 build_real (TREE_TYPE (arg), dconst0));
5852 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5855 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5857 imode = int_mode_for_mode (fmode);
5858 if (imode == BLKmode)
5859 return NULL_RTX;
5860 temp = gen_lowpart (imode, temp);
5862 else
5864 imode = word_mode;
5865 /* Handle targets with different FP word orders. */
5866 if (FLOAT_WORDS_BIG_ENDIAN)
5867 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5868 else
5869 word = bitpos / BITS_PER_WORD;
5870 temp = operand_subword_force (temp, word, fmode);
5871 bitpos = bitpos % BITS_PER_WORD;
5874 /* Force the intermediate word_mode (or narrower) result into a
5875 register. This avoids attempting to create paradoxical SUBREGs
5876 of floating point modes below. */
5877 temp = force_reg (imode, temp);
5879 /* If the bitpos is within the "result mode" lowpart, the operation
5880 can be implemented with a single bitwise AND. Otherwise, we need
5881 a right shift and an AND. */
5883 if (bitpos < GET_MODE_BITSIZE (rmode))
5885 if (bitpos < HOST_BITS_PER_WIDE_INT)
5887 hi = 0;
5888 lo = (HOST_WIDE_INT) 1 << bitpos;
5890 else
5892 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5893 lo = 0;
5896 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5897 temp = gen_lowpart (rmode, temp);
5898 temp = expand_binop (rmode, and_optab, temp,
5899 immed_double_const (lo, hi, rmode),
5900 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5902 else
5904 /* Perform a logical right shift to place the signbit in the least
5905 significant bit, then truncate the result to the desired mode
5906 and mask just this bit. */
5907 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5908 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5909 temp = gen_lowpart (rmode, temp);
5910 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5911 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5914 return temp;
5917 /* Expand fork or exec calls. TARGET is the desired target of the
5918 call. EXP is the call. FN is the FUNCTION_DECL of the actual
5919 function. IGNORE is nonzero if the value is to be ignored. */
5922 static rtx
5923 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5925 tree id, decl;
5926 tree call;
5928 /* If we are not profiling, just call the function. */
5929 if (!profile_arc_flag)
5930 return NULL_RTX;
5932 /* Otherwise call the wrapper. This should be equivalent for the rest of
5933 the compiler, so the code does not diverge, and the wrapper may run the
5934 code necessary for keeping the profiling sane. */
5936 switch (DECL_FUNCTION_CODE (fn))
5938 case BUILT_IN_FORK:
5939 id = get_identifier ("__gcov_fork");
5940 break;
5942 case BUILT_IN_EXECL:
5943 id = get_identifier ("__gcov_execl");
5944 break;
5946 case BUILT_IN_EXECV:
5947 id = get_identifier ("__gcov_execv");
5948 break;
5950 case BUILT_IN_EXECLP:
5951 id = get_identifier ("__gcov_execlp");
5952 break;
5954 case BUILT_IN_EXECLE:
5955 id = get_identifier ("__gcov_execle");
5956 break;
5958 case BUILT_IN_EXECVP:
5959 id = get_identifier ("__gcov_execvp");
5960 break;
5962 case BUILT_IN_EXECVE:
5963 id = get_identifier ("__gcov_execve");
5964 break;
5966 default:
5967 gcc_unreachable ();
5970 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5971 DECL_EXTERNAL (decl) = 1;
5972 TREE_PUBLIC (decl) = 1;
5973 DECL_ARTIFICIAL (decl) = 1;
5974 TREE_NOTHROW (decl) = 1;
5975 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5976 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5977 call = rewrite_call_expr (exp, 0, decl, 0);
5978 return expand_call (call, target, ignore);
5983 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5984 the pointer in these functions is void*, the tree optimizers may remove
5985 casts. The mode computed in expand_builtin isn't reliable either, due
5986 to __sync_bool_compare_and_swap.
5988 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5989 group of builtins. This gives us log2 of the mode size. */
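/* So, for instance, FCODE_DIFF of 0, 1, 2, 3, 4 requests an integer mode
   of 1, 2, 4, 8 and 16 bytes respectively (QImode, HImode, SImode, DImode,
   TImode on most targets).  */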
5991 static inline enum machine_mode
5992 get_builtin_sync_mode (int fcode_diff)
5994 /* The size is not negotiable, so ask not to get BLKmode in return
5995 if the target indicates that a smaller size would be better. */
5996 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5999 /* Expand the memory expression LOC and return the appropriate memory operand
6000 for the builtin_sync operations. */
6002 static rtx
6003 get_builtin_sync_mem (tree loc, enum machine_mode mode)
6005 rtx addr, mem;
6007 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
6009 /* Note that we explicitly do not want any alias information for this
6010 memory, so that we kill all other live memories. Otherwise we don't
6011 satisfy the full barrier semantics of the intrinsic. */
6012 mem = validize_mem (gen_rtx_MEM (mode, addr));
6014 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6015 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6016 MEM_VOLATILE_P (mem) = 1;
6018 return mem;
6021 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6022 EXP is the CALL_EXPR. CODE is the rtx code
6023 that corresponds to the arithmetic or logical operation from the name;
6024 an exception here is that NOT actually means NAND. TARGET is an optional
6025 place for us to store the results; AFTER is true if this is the
6026 fetch_and_xxx form. IGNORE is true if we don't actually care about
6027 the result of the operation at all. */
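/* For example, __sync_fetch_and_add (p, v) atomically performs

       tmp = *p; *p = tmp + v; return tmp;

   while __sync_add_and_fetch (p, v) returns tmp + v instead; for the NAND
   variants the stored value is ~(tmp & v), per the GCC 4.4 semantics
   warned about below.  */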
6029 static rtx
6030 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6031 enum rtx_code code, bool after,
6032 rtx target, bool ignore)
6034 rtx val, mem;
6035 enum machine_mode old_mode;
6037 if (code == NOT && warn_sync_nand)
6039 tree fndecl = get_callee_fndecl (exp);
6040 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6042 static bool warned_f_a_n, warned_n_a_f;
6044 switch (fcode)
6046 case BUILT_IN_FETCH_AND_NAND_1:
6047 case BUILT_IN_FETCH_AND_NAND_2:
6048 case BUILT_IN_FETCH_AND_NAND_4:
6049 case BUILT_IN_FETCH_AND_NAND_8:
6050 case BUILT_IN_FETCH_AND_NAND_16:
6052 if (warned_f_a_n)
6053 break;
6055 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6056 inform (input_location,
6057 "%qD changed semantics in GCC 4.4", fndecl);
6058 warned_f_a_n = true;
6059 break;
6061 case BUILT_IN_NAND_AND_FETCH_1:
6062 case BUILT_IN_NAND_AND_FETCH_2:
6063 case BUILT_IN_NAND_AND_FETCH_4:
6064 case BUILT_IN_NAND_AND_FETCH_8:
6065 case BUILT_IN_NAND_AND_FETCH_16:
6067 if (warned_n_a_f)
6068 break;
6070 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6071 inform (input_location,
6072 "%qD changed semantics in GCC 4.4", fndecl);
6073 warned_n_a_f = true;
6074 break;
6076 default:
6077 gcc_unreachable ();
6081 /* Expand the operands. */
6082 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6084 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6085 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6086 of CONST_INTs, where we know the old_mode only from the call argument. */
6087 old_mode = GET_MODE (val);
6088 if (old_mode == VOIDmode)
6089 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6090 val = convert_modes (mode, old_mode, val, 1);
6092 if (ignore)
6093 return expand_sync_operation (mem, val, code);
6094 else
6095 return expand_sync_fetch_operation (mem, val, code, after, target);
6098 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6099 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6100 true if this is the boolean form. TARGET is a place for us to store the
6101 results; this is NOT optional if IS_BOOL is true. */
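/* For example, __sync_val_compare_and_swap (p, old, new) atomically does

       tmp = *p; if (tmp == old) *p = new; return tmp;

   while the bool form instead returns whether the store happened.  */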
6103 static rtx
6104 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6105 bool is_bool, rtx target)
6107 rtx old_val, new_val, mem;
6108 enum machine_mode old_mode;
6110 /* Expand the operands. */
6111 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6114 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6115 mode, EXPAND_NORMAL);
6116 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6117 of CONST_INTs, where we know the old_mode only from the call argument. */
6118 old_mode = GET_MODE (old_val);
6119 if (old_mode == VOIDmode)
6120 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6121 old_val = convert_modes (mode, old_mode, old_val, 1);
6123 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6124 mode, EXPAND_NORMAL);
6125 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6126 of CONST_INTs, where we know the old_mode only from the call argument. */
6127 old_mode = GET_MODE (new_val);
6128 if (old_mode == VOIDmode)
6129 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6130 new_val = convert_modes (mode, old_mode, new_val, 1);
6132 if (is_bool)
6133 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6134 else
6135 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6138 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6139 general form is actually an atomic exchange, and some targets only
6140 support a reduced form with the second argument being a constant 1.
6141 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6142 the results. */
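/* For example, __sync_lock_test_and_set (p, v) atomically does

       tmp = *p; *p = v; return tmp;

   with acquire semantics; targets that only support the reduced form
   require V to be the constant 1.  */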
6144 static rtx
6145 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6146 rtx target)
6148 rtx val, mem;
6149 enum machine_mode old_mode;
6151 /* Expand the operands. */
6152 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6153 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6154 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6155 of CONST_INTs, where we know the old_mode only from the call argument. */
6156 old_mode = GET_MODE (val);
6157 if (old_mode == VOIDmode)
6158 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6159 val = convert_modes (mode, old_mode, val, 1);
6161 return expand_sync_lock_test_and_set (mem, val, target);
6164 /* Expand the __sync_synchronize intrinsic. */
6166 static void
6167 expand_builtin_synchronize (void)
6169 tree x;
6171 #ifdef HAVE_memory_barrier
6172 if (HAVE_memory_barrier)
6174 emit_insn (gen_memory_barrier ());
6175 return;
6177 #endif
6179 if (synchronize_libfunc != NULL_RTX)
6181 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6182 return;
6185 /* If no explicit memory barrier instruction is available, create an
6186 empty asm stmt with a memory clobber. */
6187 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6188 tree_cons (NULL, build_string (6, "memory"), NULL));
6189 ASM_VOLATILE_P (x) = 1;
6190 expand_asm_expr (x);
6193 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6195 static void
6196 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6198 enum insn_code icode;
6199 rtx mem, insn;
6200 rtx val = const0_rtx;
6202 /* Expand the operands. */
6203 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6205 /* If there is an explicit operation in the md file, use it. */
6206 icode = sync_lock_release[mode];
6207 if (icode != CODE_FOR_nothing)
6209 if (!insn_data[icode].operand[1].predicate (val, mode))
6210 val = force_reg (mode, val);
6212 insn = GEN_FCN (icode) (mem, val);
6213 if (insn)
6215 emit_insn (insn);
6216 return;
6220 /* Otherwise we can implement this operation by emitting a barrier
6221 followed by a store of zero. */
6222 expand_builtin_synchronize ();
6223 emit_move_insn (mem, val);
6226 /* Expand an expression EXP that calls a built-in function,
6227 with result going to TARGET if that's convenient
6228 (and in mode MODE if that's convenient).
6229 SUBTARGET may be used as the target for computing one of EXP's operands.
6230 IGNORE is nonzero if the value is to be ignored. */
6233 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6234 int ignore)
6236 tree fndecl = get_callee_fndecl (exp);
6237 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6238 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6240 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6241 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6243 /* When not optimizing, generate calls to library functions for a certain
6244 set of builtins. */
6245 if (!optimize
6246 && !called_as_built_in (fndecl)
6247 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6248 && fcode != BUILT_IN_ALLOCA
6249 && fcode != BUILT_IN_FREE)
6250 return expand_call (exp, target, ignore);
6252 /* The built-in function expanders test for target == const0_rtx
6253 to determine whether the function's result will be ignored. */
6254 if (ignore)
6255 target = const0_rtx;
6257 /* If the result of a pure or const built-in function is ignored, and
6258 none of its arguments are volatile, we can avoid expanding the
6259 built-in call and just evaluate the arguments for side-effects. */
6260 if (target == const0_rtx
6261 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6263 bool volatilep = false;
6264 tree arg;
6265 call_expr_arg_iterator iter;
6267 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6268 if (TREE_THIS_VOLATILE (arg))
6270 volatilep = true;
6271 break;
6274 if (! volatilep)
6276 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6277 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6278 return const0_rtx;
6282 switch (fcode)
6284 CASE_FLT_FN (BUILT_IN_FABS):
6285 target = expand_builtin_fabs (exp, target, subtarget);
6286 if (target)
6287 return target;
6288 break;
6290 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6291 target = expand_builtin_copysign (exp, target, subtarget);
6292 if (target)
6293 return target;
6294 break;
6296 /* Just do a normal library call if we were unable to fold
6297 the values. */
6298 CASE_FLT_FN (BUILT_IN_CABS):
6299 break;
6301 CASE_FLT_FN (BUILT_IN_EXP):
6302 CASE_FLT_FN (BUILT_IN_EXP10):
6303 CASE_FLT_FN (BUILT_IN_POW10):
6304 CASE_FLT_FN (BUILT_IN_EXP2):
6305 CASE_FLT_FN (BUILT_IN_EXPM1):
6306 CASE_FLT_FN (BUILT_IN_LOGB):
6307 CASE_FLT_FN (BUILT_IN_LOG):
6308 CASE_FLT_FN (BUILT_IN_LOG10):
6309 CASE_FLT_FN (BUILT_IN_LOG2):
6310 CASE_FLT_FN (BUILT_IN_LOG1P):
6311 CASE_FLT_FN (BUILT_IN_TAN):
6312 CASE_FLT_FN (BUILT_IN_ASIN):
6313 CASE_FLT_FN (BUILT_IN_ACOS):
6314 CASE_FLT_FN (BUILT_IN_ATAN):
6315 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6316 because of possible accuracy problems. */
6317 if (! flag_unsafe_math_optimizations)
6318 break;
6319 CASE_FLT_FN (BUILT_IN_SQRT):
6320 CASE_FLT_FN (BUILT_IN_FLOOR):
6321 CASE_FLT_FN (BUILT_IN_CEIL):
6322 CASE_FLT_FN (BUILT_IN_TRUNC):
6323 CASE_FLT_FN (BUILT_IN_ROUND):
6324 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6325 CASE_FLT_FN (BUILT_IN_RINT):
6326 target = expand_builtin_mathfn (exp, target, subtarget);
6327 if (target)
6328 return target;
6329 break;
6331 CASE_FLT_FN (BUILT_IN_ILOGB):
6332 if (! flag_unsafe_math_optimizations)
6333 break;
6334 CASE_FLT_FN (BUILT_IN_ISINF):
6335 CASE_FLT_FN (BUILT_IN_FINITE):
6336 case BUILT_IN_ISFINITE:
6337 case BUILT_IN_ISNORMAL:
6338 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6339 if (target)
6340 return target;
6341 break;
6343 CASE_FLT_FN (BUILT_IN_LCEIL):
6344 CASE_FLT_FN (BUILT_IN_LLCEIL):
6345 CASE_FLT_FN (BUILT_IN_LFLOOR):
6346 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6347 target = expand_builtin_int_roundingfn (exp, target);
6348 if (target)
6349 return target;
6350 break;
6352 CASE_FLT_FN (BUILT_IN_LRINT):
6353 CASE_FLT_FN (BUILT_IN_LLRINT):
6354 CASE_FLT_FN (BUILT_IN_LROUND):
6355 CASE_FLT_FN (BUILT_IN_LLROUND):
6356 target = expand_builtin_int_roundingfn_2 (exp, target);
6357 if (target)
6358 return target;
6359 break;
6361 CASE_FLT_FN (BUILT_IN_POW):
6362 target = expand_builtin_pow (exp, target, subtarget);
6363 if (target)
6364 return target;
6365 break;
6367 CASE_FLT_FN (BUILT_IN_POWI):
6368 target = expand_builtin_powi (exp, target, subtarget);
6369 if (target)
6370 return target;
6371 break;
6373 CASE_FLT_FN (BUILT_IN_ATAN2):
6374 CASE_FLT_FN (BUILT_IN_LDEXP):
6375 CASE_FLT_FN (BUILT_IN_SCALB):
6376 CASE_FLT_FN (BUILT_IN_SCALBN):
6377 CASE_FLT_FN (BUILT_IN_SCALBLN):
6378 if (! flag_unsafe_math_optimizations)
6379 break;
6381 CASE_FLT_FN (BUILT_IN_FMOD):
6382 CASE_FLT_FN (BUILT_IN_REMAINDER):
6383 CASE_FLT_FN (BUILT_IN_DREM):
6384 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6385 if (target)
6386 return target;
6387 break;
6389 CASE_FLT_FN (BUILT_IN_CEXPI):
6390 target = expand_builtin_cexpi (exp, target, subtarget);
6391 gcc_assert (target);
6392 return target;
6394 CASE_FLT_FN (BUILT_IN_SIN):
6395 CASE_FLT_FN (BUILT_IN_COS):
6396 if (! flag_unsafe_math_optimizations)
6397 break;
6398 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6399 if (target)
6400 return target;
6401 break;
6403 CASE_FLT_FN (BUILT_IN_SINCOS):
6404 if (! flag_unsafe_math_optimizations)
6405 break;
6406 target = expand_builtin_sincos (exp);
6407 if (target)
6408 return target;
6409 break;
6411 case BUILT_IN_APPLY_ARGS:
6412 return expand_builtin_apply_args ();
6414 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6415 FUNCTION with a copy of the parameters described by
6416 ARGUMENTS, and ARGSIZE. It returns a block of memory
6417 allocated on the stack into which is stored all the registers
6418 that might possibly be used for returning the result of a
6419 function. ARGUMENTS is the value returned by
6420 __builtin_apply_args. ARGSIZE is the number of bytes of
6421 arguments that must be copied. ??? How should this value be
6422 computed? We'll also need a safe worst case value for varargs
6423 functions. */
6424 case BUILT_IN_APPLY:
6425 if (!validate_arglist (exp, POINTER_TYPE,
6426 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6427 && !validate_arglist (exp, REFERENCE_TYPE,
6428 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6429 return const0_rtx;
6430 else
6432 rtx ops[3];
6434 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6435 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6436 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6438 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6441 /* __builtin_return (RESULT) causes the function to return the
6442 value described by RESULT. RESULT is address of the block of
6443 memory returned by __builtin_apply. */
6444 case BUILT_IN_RETURN:
6445 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6446 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6447 return const0_rtx;
6449 case BUILT_IN_SAVEREGS:
6450 return expand_builtin_saveregs ();
6452 case BUILT_IN_ARGS_INFO:
6453 return expand_builtin_args_info (exp);
6455 case BUILT_IN_VA_ARG_PACK:
6456 /* All valid uses of __builtin_va_arg_pack () are removed during
6457 inlining. */
6458 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6459 return const0_rtx;
6461 case BUILT_IN_VA_ARG_PACK_LEN:
6462 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6463 inlining. */
6464 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6465 return const0_rtx;
6467 /* Return the address of the first anonymous stack arg. */
6468 case BUILT_IN_NEXT_ARG:
6469 if (fold_builtin_next_arg (exp, false))
6470 return const0_rtx;
6471 return expand_builtin_next_arg ();
6473 case BUILT_IN_CLEAR_CACHE:
6474 target = expand_builtin___clear_cache (exp);
6475 if (target)
6476 return target;
6477 break;
6479 case BUILT_IN_CLASSIFY_TYPE:
6480 return expand_builtin_classify_type (exp);
6482 case BUILT_IN_CONSTANT_P:
6483 return const0_rtx;
6485 case BUILT_IN_FRAME_ADDRESS:
6486 case BUILT_IN_RETURN_ADDRESS:
6487 return expand_builtin_frame_address (fndecl, exp);
6489 /* Returns the address of the area where the structure is returned.
6490 0 otherwise. */
6491 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6492 if (call_expr_nargs (exp) != 0
6493 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6494 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6495 return const0_rtx;
6496 else
6497 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6499 case BUILT_IN_ALLOCA:
6500 target = expand_builtin_alloca (exp, target);
6501 if (target)
6502 return target;
6503 break;
6505 case BUILT_IN_STACK_SAVE:
6506 return expand_stack_save ();
6508 case BUILT_IN_STACK_RESTORE:
6509 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6510 return const0_rtx;
6512 case BUILT_IN_BSWAP32:
6513 case BUILT_IN_BSWAP64:
6514 target = expand_builtin_bswap (exp, target, subtarget);
6516 if (target)
6517 return target;
6518 break;
6520 CASE_INT_FN (BUILT_IN_FFS):
6521 case BUILT_IN_FFSIMAX:
6522 target = expand_builtin_unop (target_mode, exp, target,
6523 subtarget, ffs_optab);
6524 if (target)
6525 return target;
6526 break;
6528 CASE_INT_FN (BUILT_IN_CLZ):
6529 case BUILT_IN_CLZIMAX:
6530 target = expand_builtin_unop (target_mode, exp, target,
6531 subtarget, clz_optab);
6532 if (target)
6533 return target;
6534 break;
6536 CASE_INT_FN (BUILT_IN_CTZ):
6537 case BUILT_IN_CTZIMAX:
6538 target = expand_builtin_unop (target_mode, exp, target,
6539 subtarget, ctz_optab);
6540 if (target)
6541 return target;
6542 break;
6544 CASE_INT_FN (BUILT_IN_POPCOUNT):
6545 case BUILT_IN_POPCOUNTIMAX:
6546 target = expand_builtin_unop (target_mode, exp, target,
6547 subtarget, popcount_optab);
6548 if (target)
6549 return target;
6550 break;
6552 CASE_INT_FN (BUILT_IN_PARITY):
6553 case BUILT_IN_PARITYIMAX:
6554 target = expand_builtin_unop (target_mode, exp, target,
6555 subtarget, parity_optab);
6556 if (target)
6557 return target;
6558 break;
6560 case BUILT_IN_STRLEN:
6561 target = expand_builtin_strlen (exp, target, target_mode);
6562 if (target)
6563 return target;
6564 break;
6566 case BUILT_IN_STRCPY:
6567 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6568 if (target)
6569 return target;
6570 break;
6572 case BUILT_IN_STRNCPY:
6573 target = expand_builtin_strncpy (exp, target, mode);
6574 if (target)
6575 return target;
6576 break;
6578 case BUILT_IN_STPCPY:
6579 target = expand_builtin_stpcpy (exp, target, mode);
6580 if (target)
6581 return target;
6582 break;
6584 case BUILT_IN_STRCAT:
6585 target = expand_builtin_strcat (fndecl, exp, target, mode);
6586 if (target)
6587 return target;
6588 break;
6590 case BUILT_IN_STRNCAT:
6591 target = expand_builtin_strncat (exp, target, mode);
6592 if (target)
6593 return target;
6594 break;
6596 case BUILT_IN_STRSPN:
6597 target = expand_builtin_strspn (exp, target, mode);
6598 if (target)
6599 return target;
6600 break;
6602 case BUILT_IN_STRCSPN:
6603 target = expand_builtin_strcspn (exp, target, mode);
6604 if (target)
6605 return target;
6606 break;
6608 case BUILT_IN_STRSTR:
6609 target = expand_builtin_strstr (exp, target, mode);
6610 if (target)
6611 return target;
6612 break;
6614 case BUILT_IN_STRPBRK:
6615 target = expand_builtin_strpbrk (exp, target, mode);
6616 if (target)
6617 return target;
6618 break;
6620 case BUILT_IN_INDEX:
6621 case BUILT_IN_STRCHR:
6622 target = expand_builtin_strchr (exp, target, mode);
6623 if (target)
6624 return target;
6625 break;
6627 case BUILT_IN_RINDEX:
6628 case BUILT_IN_STRRCHR:
6629 target = expand_builtin_strrchr (exp, target, mode);
6630 if (target)
6631 return target;
6632 break;
6634 case BUILT_IN_MEMCPY:
6635 target = expand_builtin_memcpy (exp, target, mode);
6636 if (target)
6637 return target;
6638 break;
6640 case BUILT_IN_MEMPCPY:
6641 target = expand_builtin_mempcpy (exp, target, mode);
6642 if (target)
6643 return target;
6644 break;
6646 case BUILT_IN_MEMMOVE:
6647 target = expand_builtin_memmove (exp, target, mode, ignore);
6648 if (target)
6649 return target;
6650 break;
6652 case BUILT_IN_BCOPY:
6653 target = expand_builtin_bcopy (exp, ignore);
6654 if (target)
6655 return target;
6656 break;
6658 case BUILT_IN_MEMSET:
6659 target = expand_builtin_memset (exp, target, mode);
6660 if (target)
6661 return target;
6662 break;
6664 case BUILT_IN_BZERO:
6665 target = expand_builtin_bzero (exp);
6666 if (target)
6667 return target;
6668 break;
6670 case BUILT_IN_STRCMP:
6671 target = expand_builtin_strcmp (exp, target, mode);
6672 if (target)
6673 return target;
6674 break;
6676 case BUILT_IN_STRNCMP:
6677 target = expand_builtin_strncmp (exp, target, mode);
6678 if (target)
6679 return target;
6680 break;
6682 case BUILT_IN_MEMCHR:
6683 target = expand_builtin_memchr (exp, target, mode);
6684 if (target)
6685 return target;
6686 break;
6688 case BUILT_IN_BCMP:
6689 case BUILT_IN_MEMCMP:
6690 target = expand_builtin_memcmp (exp, target, mode);
6691 if (target)
6692 return target;
6693 break;
6695 case BUILT_IN_SETJMP:
6696 /* This should have been lowered to the builtins below. */
6697 gcc_unreachable ();
6699 case BUILT_IN_SETJMP_SETUP:
6700 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6701 and the receiver label. */
6702 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6704 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6705 VOIDmode, EXPAND_NORMAL);
6706 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6707 rtx label_r = label_rtx (label);
6709 /* This is copied from the handling of non-local gotos. */
6710 expand_builtin_setjmp_setup (buf_addr, label_r);
6711 nonlocal_goto_handler_labels
6712 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6713 nonlocal_goto_handler_labels);
6714 /* ??? Do not let expand_label treat us as such since we would
6715 not want to be both on the list of non-local labels and on
6716 the list of forced labels. */
6717 FORCED_LABEL (label) = 0;
6718 return const0_rtx;
6720 break;
6722 case BUILT_IN_SETJMP_DISPATCHER:
6723 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6724 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6726 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6727 rtx label_r = label_rtx (label);
6729 /* Remove the dispatcher label from the list of non-local labels
6730 since the receiver labels have been added to it above. */
6731 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6732 return const0_rtx;
6734 break;
6736 case BUILT_IN_SETJMP_RECEIVER:
6737 /* __builtin_setjmp_receiver is passed the receiver label. */
6738 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6740 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6741 rtx label_r = label_rtx (label);
6743 expand_builtin_setjmp_receiver (label_r);
6744 return const0_rtx;
6746 break;
6748 /* __builtin_longjmp is passed a pointer to an array of five words.
6749 It's similar to the C library longjmp function but works with
6750 __builtin_setjmp above. */
6751 case BUILT_IN_LONGJMP:
6752 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6754 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6755 VOIDmode, EXPAND_NORMAL);
6756 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6758 if (value != const1_rtx)
6760 error ("%<__builtin_longjmp%> second argument must be 1");
6761 return const0_rtx;
6764 expand_builtin_longjmp (buf_addr, value);
6765 return const0_rtx;
6767 break;
6769 case BUILT_IN_NONLOCAL_GOTO:
6770 target = expand_builtin_nonlocal_goto (exp);
6771 if (target)
6772 return target;
6773 break;
6775 /* This updates the setjmp buffer that is its argument with the value
6776 of the current stack pointer. */
6777 case BUILT_IN_UPDATE_SETJMP_BUF:
6778 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6780 rtx buf_addr
6781 = expand_normal (CALL_EXPR_ARG (exp, 0));
6783 expand_builtin_update_setjmp_buf (buf_addr);
6784 return const0_rtx;
6786 break;
6788 case BUILT_IN_TRAP:
6789 expand_builtin_trap ();
6790 return const0_rtx;
6792 case BUILT_IN_PRINTF:
6793 target = expand_builtin_printf (exp, target, mode, false);
6794 if (target)
6795 return target;
6796 break;
6798 case BUILT_IN_PRINTF_UNLOCKED:
6799 target = expand_builtin_printf (exp, target, mode, true);
6800 if (target)
6801 return target;
6802 break;
6804 case BUILT_IN_FPUTS:
6805 target = expand_builtin_fputs (exp, target, false);
6806 if (target)
6807 return target;
6808 break;
6809 case BUILT_IN_FPUTS_UNLOCKED:
6810 target = expand_builtin_fputs (exp, target, true);
6811 if (target)
6812 return target;
6813 break;
6815 case BUILT_IN_FPRINTF:
6816 target = expand_builtin_fprintf (exp, target, mode, false);
6817 if (target)
6818 return target;
6819 break;
6821 case BUILT_IN_FPRINTF_UNLOCKED:
6822 target = expand_builtin_fprintf (exp, target, mode, true);
6823 if (target)
6824 return target;
6825 break;
6827 case BUILT_IN_SPRINTF:
6828 target = expand_builtin_sprintf (exp, target, mode);
6829 if (target)
6830 return target;
6831 break;
6833 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6834 case BUILT_IN_SIGNBITD32:
6835 case BUILT_IN_SIGNBITD64:
6836 case BUILT_IN_SIGNBITD128:
6837 target = expand_builtin_signbit (exp, target);
6838 if (target)
6839 return target;
6840 break;
6842 /* Various hooks for the DWARF 2 __throw routine. */
6843 case BUILT_IN_UNWIND_INIT:
6844 expand_builtin_unwind_init ();
6845 return const0_rtx;
6846 case BUILT_IN_DWARF_CFA:
6847 return virtual_cfa_rtx;
6848 #ifdef DWARF2_UNWIND_INFO
6849 case BUILT_IN_DWARF_SP_COLUMN:
6850 return expand_builtin_dwarf_sp_column ();
6851 case BUILT_IN_INIT_DWARF_REG_SIZES:
6852 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6853 return const0_rtx;
6854 #endif
6855 case BUILT_IN_FROB_RETURN_ADDR:
6856 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6857 case BUILT_IN_EXTRACT_RETURN_ADDR:
6858 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6859 case BUILT_IN_EH_RETURN:
6860 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6861 CALL_EXPR_ARG (exp, 1));
6862 return const0_rtx;
6863 #ifdef EH_RETURN_DATA_REGNO
6864 case BUILT_IN_EH_RETURN_DATA_REGNO:
6865 return expand_builtin_eh_return_data_regno (exp);
6866 #endif
6867 case BUILT_IN_EXTEND_POINTER:
6868 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6870 case BUILT_IN_VA_START:
6871 return expand_builtin_va_start (exp);
6872 case BUILT_IN_VA_END:
6873 return expand_builtin_va_end (exp);
6874 case BUILT_IN_VA_COPY:
6875 return expand_builtin_va_copy (exp);
6876 case BUILT_IN_EXPECT:
6877 return expand_builtin_expect (exp, target);
6878 case BUILT_IN_PREFETCH:
6879 expand_builtin_prefetch (exp);
6880 return const0_rtx;
6882 case BUILT_IN_PROFILE_FUNC_ENTER:
6883 return expand_builtin_profile_func (false);
6884 case BUILT_IN_PROFILE_FUNC_EXIT:
6885 return expand_builtin_profile_func (true);
6887 case BUILT_IN_INIT_TRAMPOLINE:
6888 return expand_builtin_init_trampoline (exp);
6889 case BUILT_IN_ADJUST_TRAMPOLINE:
6890 return expand_builtin_adjust_trampoline (exp);
6892 case BUILT_IN_FORK:
6893 case BUILT_IN_EXECL:
6894 case BUILT_IN_EXECV:
6895 case BUILT_IN_EXECLP:
6896 case BUILT_IN_EXECLE:
6897 case BUILT_IN_EXECVP:
6898 case BUILT_IN_EXECVE:
6899 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6900 if (target)
6901 return target;
6902 break;
6904 case BUILT_IN_FETCH_AND_ADD_1:
6905 case BUILT_IN_FETCH_AND_ADD_2:
6906 case BUILT_IN_FETCH_AND_ADD_4:
6907 case BUILT_IN_FETCH_AND_ADD_8:
6908 case BUILT_IN_FETCH_AND_ADD_16:
6909 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6910 target = expand_builtin_sync_operation (mode, exp, PLUS,
6911 false, target, ignore);
6912 if (target)
6913 return target;
6914 break;
6916 case BUILT_IN_FETCH_AND_SUB_1:
6917 case BUILT_IN_FETCH_AND_SUB_2:
6918 case BUILT_IN_FETCH_AND_SUB_4:
6919 case BUILT_IN_FETCH_AND_SUB_8:
6920 case BUILT_IN_FETCH_AND_SUB_16:
6921 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6922 target = expand_builtin_sync_operation (mode, exp, MINUS,
6923 false, target, ignore);
6924 if (target)
6925 return target;
6926 break;
6928 case BUILT_IN_FETCH_AND_OR_1:
6929 case BUILT_IN_FETCH_AND_OR_2:
6930 case BUILT_IN_FETCH_AND_OR_4:
6931 case BUILT_IN_FETCH_AND_OR_8:
6932 case BUILT_IN_FETCH_AND_OR_16:
6933 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6934 target = expand_builtin_sync_operation (mode, exp, IOR,
6935 false, target, ignore);
6936 if (target)
6937 return target;
6938 break;
6940 case BUILT_IN_FETCH_AND_AND_1:
6941 case BUILT_IN_FETCH_AND_AND_2:
6942 case BUILT_IN_FETCH_AND_AND_4:
6943 case BUILT_IN_FETCH_AND_AND_8:
6944 case BUILT_IN_FETCH_AND_AND_16:
6945 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6946 target = expand_builtin_sync_operation (mode, exp, AND,
6947 false, target, ignore);
6948 if (target)
6949 return target;
6950 break;
6952 case BUILT_IN_FETCH_AND_XOR_1:
6953 case BUILT_IN_FETCH_AND_XOR_2:
6954 case BUILT_IN_FETCH_AND_XOR_4:
6955 case BUILT_IN_FETCH_AND_XOR_8:
6956 case BUILT_IN_FETCH_AND_XOR_16:
6957 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6958 target = expand_builtin_sync_operation (mode, exp, XOR,
6959 false, target, ignore);
6960 if (target)
6961 return target;
6962 break;
6964 case BUILT_IN_FETCH_AND_NAND_1:
6965 case BUILT_IN_FETCH_AND_NAND_2:
6966 case BUILT_IN_FETCH_AND_NAND_4:
6967 case BUILT_IN_FETCH_AND_NAND_8:
6968 case BUILT_IN_FETCH_AND_NAND_16:
6969 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6970 target = expand_builtin_sync_operation (mode, exp, NOT,
6971 false, target, ignore);
6972 if (target)
6973 return target;
6974 break;
6976 case BUILT_IN_ADD_AND_FETCH_1:
6977 case BUILT_IN_ADD_AND_FETCH_2:
6978 case BUILT_IN_ADD_AND_FETCH_4:
6979 case BUILT_IN_ADD_AND_FETCH_8:
6980 case BUILT_IN_ADD_AND_FETCH_16:
6981 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6982 target = expand_builtin_sync_operation (mode, exp, PLUS,
6983 true, target, ignore);
6984 if (target)
6985 return target;
6986 break;
6988 case BUILT_IN_SUB_AND_FETCH_1:
6989 case BUILT_IN_SUB_AND_FETCH_2:
6990 case BUILT_IN_SUB_AND_FETCH_4:
6991 case BUILT_IN_SUB_AND_FETCH_8:
6992 case BUILT_IN_SUB_AND_FETCH_16:
6993 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6994 target = expand_builtin_sync_operation (mode, exp, MINUS,
6995 true, target, ignore);
6996 if (target)
6997 return target;
6998 break;
7000 case BUILT_IN_OR_AND_FETCH_1:
7001 case BUILT_IN_OR_AND_FETCH_2:
7002 case BUILT_IN_OR_AND_FETCH_4:
7003 case BUILT_IN_OR_AND_FETCH_8:
7004 case BUILT_IN_OR_AND_FETCH_16:
7005 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
7006 target = expand_builtin_sync_operation (mode, exp, IOR,
7007 true, target, ignore);
7008 if (target)
7009 return target;
7010 break;
7012 case BUILT_IN_AND_AND_FETCH_1:
7013 case BUILT_IN_AND_AND_FETCH_2:
7014 case BUILT_IN_AND_AND_FETCH_4:
7015 case BUILT_IN_AND_AND_FETCH_8:
7016 case BUILT_IN_AND_AND_FETCH_16:
7017 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7018 target = expand_builtin_sync_operation (mode, exp, AND,
7019 true, target, ignore);
7020 if (target)
7021 return target;
7022 break;
7024 case BUILT_IN_XOR_AND_FETCH_1:
7025 case BUILT_IN_XOR_AND_FETCH_2:
7026 case BUILT_IN_XOR_AND_FETCH_4:
7027 case BUILT_IN_XOR_AND_FETCH_8:
7028 case BUILT_IN_XOR_AND_FETCH_16:
7029 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7030 target = expand_builtin_sync_operation (mode, exp, XOR,
7031 true, target, ignore);
7032 if (target)
7033 return target;
7034 break;
7036 case BUILT_IN_NAND_AND_FETCH_1:
7037 case BUILT_IN_NAND_AND_FETCH_2:
7038 case BUILT_IN_NAND_AND_FETCH_4:
7039 case BUILT_IN_NAND_AND_FETCH_8:
7040 case BUILT_IN_NAND_AND_FETCH_16:
7041 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7042 target = expand_builtin_sync_operation (mode, exp, NOT,
7043 true, target, ignore);
7044 if (target)
7045 return target;
7046 break;
7048 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7049 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7050 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7051 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7052 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7053 if (mode == VOIDmode)
7054 mode = TYPE_MODE (boolean_type_node);
7055 if (!target || !register_operand (target, mode))
7056 target = gen_reg_rtx (mode);
7058 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7059 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7060 if (target)
7061 return target;
7062 break;
7064 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7065 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7066 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7067 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7068 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7069 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7070 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7071 if (target)
7072 return target;
7073 break;
7075 case BUILT_IN_LOCK_TEST_AND_SET_1:
7076 case BUILT_IN_LOCK_TEST_AND_SET_2:
7077 case BUILT_IN_LOCK_TEST_AND_SET_4:
7078 case BUILT_IN_LOCK_TEST_AND_SET_8:
7079 case BUILT_IN_LOCK_TEST_AND_SET_16:
7080 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7081 target = expand_builtin_lock_test_and_set (mode, exp, target);
7082 if (target)
7083 return target;
7084 break;
7086 case BUILT_IN_LOCK_RELEASE_1:
7087 case BUILT_IN_LOCK_RELEASE_2:
7088 case BUILT_IN_LOCK_RELEASE_4:
7089 case BUILT_IN_LOCK_RELEASE_8:
7090 case BUILT_IN_LOCK_RELEASE_16:
7091 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7092 expand_builtin_lock_release (mode, exp);
7093 return const0_rtx;
7095 case BUILT_IN_SYNCHRONIZE:
7096 expand_builtin_synchronize ();
7097 return const0_rtx;
7099 case BUILT_IN_OBJECT_SIZE:
7100 return expand_builtin_object_size (exp);
7102 case BUILT_IN_MEMCPY_CHK:
7103 case BUILT_IN_MEMPCPY_CHK:
7104 case BUILT_IN_MEMMOVE_CHK:
7105 case BUILT_IN_MEMSET_CHK:
7106 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7107 if (target)
7108 return target;
7109 break;
7111 case BUILT_IN_STRCPY_CHK:
7112 case BUILT_IN_STPCPY_CHK:
7113 case BUILT_IN_STRNCPY_CHK:
7114 case BUILT_IN_STRCAT_CHK:
7115 case BUILT_IN_STRNCAT_CHK:
7116 case BUILT_IN_SNPRINTF_CHK:
7117 case BUILT_IN_VSNPRINTF_CHK:
7118 maybe_emit_chk_warning (exp, fcode);
7119 break;
7121 case BUILT_IN_SPRINTF_CHK:
7122 case BUILT_IN_VSPRINTF_CHK:
7123 maybe_emit_sprintf_chk_warning (exp, fcode);
7124 break;
7126 case BUILT_IN_FREE:
7127 maybe_emit_free_warning (exp);
7128 break;
7130 default: /* just do library call, if unknown builtin */
7131 break;
7134 /* The switch statement above can drop through to cause the function
7135 to be called normally. */
7136 return expand_call (exp, target, ignore);
7139 /* Determine whether a tree node represents a call to a built-in
7140 function. If the tree T is a call to a built-in function with
7141 the right number of arguments of the appropriate types, return
7142 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7143 Otherwise the return value is END_BUILTINS. */
7145 enum built_in_function
7146 builtin_mathfn_code (const_tree t)
7148 const_tree fndecl, arg, parmlist;
7149 const_tree argtype, parmtype;
7150 const_call_expr_arg_iterator iter;
7152 if (TREE_CODE (t) != CALL_EXPR
7153 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7154 return END_BUILTINS;
7156 fndecl = get_callee_fndecl (t);
7157 if (fndecl == NULL_TREE
7158 || TREE_CODE (fndecl) != FUNCTION_DECL
7159 || ! DECL_BUILT_IN (fndecl)
7160 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7161 return END_BUILTINS;
7163 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7164 init_const_call_expr_arg_iterator (t, &iter);
7165 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7167 /* If a function doesn't take a variable number of arguments,
7168 the last element in the list will have type `void'. */
7169 parmtype = TREE_VALUE (parmlist);
7170 if (VOID_TYPE_P (parmtype))
7172 if (more_const_call_expr_args_p (&iter))
7173 return END_BUILTINS;
7174 return DECL_FUNCTION_CODE (fndecl);
7177 if (! more_const_call_expr_args_p (&iter))
7178 return END_BUILTINS;
7180 arg = next_const_call_expr_arg (&iter);
7181 argtype = TREE_TYPE (arg);
7183 if (SCALAR_FLOAT_TYPE_P (parmtype))
7185 if (! SCALAR_FLOAT_TYPE_P (argtype))
7186 return END_BUILTINS;
7188 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7190 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7191 return END_BUILTINS;
7193 else if (POINTER_TYPE_P (parmtype))
7195 if (! POINTER_TYPE_P (argtype))
7196 return END_BUILTINS;
7198 else if (INTEGRAL_TYPE_P (parmtype))
7200 if (! INTEGRAL_TYPE_P (argtype))
7201 return END_BUILTINS;
7203 else
7204 return END_BUILTINS;
7207 /* Variable-length argument list. */
7208 return DECL_FUNCTION_CODE (fndecl);
7211 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7212 evaluate to a constant. */
7214 static tree
7215 fold_builtin_constant_p (tree arg)
7217 /* We return 1 for a numeric type that's known to be a constant
7218 value at compile-time or for an aggregate type that's a
7219 literal constant. */
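/* A sketch of the cases handled below (illustrative only):
   __builtin_constant_p (42) and __builtin_constant_p ("abc") fold
   to 1, while an argument with side effects folds to 0.  */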
7220 STRIP_NOPS (arg);
7222 /* If we know this is a constant, return the constant one. */
7223 if (CONSTANT_CLASS_P (arg)
7224 || (TREE_CODE (arg) == CONSTRUCTOR
7225 && TREE_CONSTANT (arg)))
7226 return integer_one_node;
7227 if (TREE_CODE (arg) == ADDR_EXPR)
7229 tree op = TREE_OPERAND (arg, 0);
7230 if (TREE_CODE (op) == STRING_CST
7231 || (TREE_CODE (op) == ARRAY_REF
7232 && integer_zerop (TREE_OPERAND (op, 1))
7233 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7234 return integer_one_node;
7237 /* If this expression has side effects, show we don't know it to be a
7238 constant.  Likewise if it's a pointer or aggregate type, since in
7239 those cases we only want literals, which are only optimized
7240 when generating RTL, not later.
7241 And finally, if we are compiling an initializer, not code, we
7242 need to return a definite result now; there's not going to be any
7243 more optimization done. */
7244 if (TREE_SIDE_EFFECTS (arg)
7245 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7246 || POINTER_TYPE_P (TREE_TYPE (arg))
7247 || cfun == 0
7248 || folding_initializer)
7249 return integer_zero_node;
7251 return NULL_TREE;
7254 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7255 return it as a truthvalue. */
7257 static tree
7258 build_builtin_expect_predicate (tree pred, tree expected)
7260 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7262 fn = built_in_decls[BUILT_IN_EXPECT];
7263 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7264 ret_type = TREE_TYPE (TREE_TYPE (fn));
7265 pred_type = TREE_VALUE (arg_types);
7266 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7268 pred = fold_convert (pred_type, pred);
7269 expected = fold_convert (expected_type, expected);
7270 call_expr = build_call_expr (fn, 2, pred, expected);
7272 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7273 build_int_cst (ret_type, 0));
7276 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7277 NULL_TREE if no simplification is possible. */
7279 static tree
7280 fold_builtin_expect (tree arg0, tree arg1)
7282 tree inner, fndecl;
7283 enum tree_code code;
7285 /* If this is a builtin_expect within a builtin_expect, keep the
7286 inner one.  See through a comparison against a constant.  It
7287 might have been added to create a truthvalue. */
7288 inner = arg0;
7289 if (COMPARISON_CLASS_P (inner)
7290 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7291 inner = TREE_OPERAND (inner, 0);
7293 if (TREE_CODE (inner) == CALL_EXPR
7294 && (fndecl = get_callee_fndecl (inner))
7295 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7296 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7297 return arg0;
7299 /* Distribute the expected value over short-circuiting operators.
7300 See through the cast from truthvalue_type_node to long. */
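/* Rough sketch of the rewrite (illustrative): the argument a && b of
   __builtin_expect (a && b, 1) becomes
   (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0),
   so the expectation reaches both short-circuited operands.  */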
7301 inner = arg0;
7302 while (TREE_CODE (inner) == NOP_EXPR
7303 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7304 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7305 inner = TREE_OPERAND (inner, 0);
7307 code = TREE_CODE (inner);
7308 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7310 tree op0 = TREE_OPERAND (inner, 0);
7311 tree op1 = TREE_OPERAND (inner, 1);
7313 op0 = build_builtin_expect_predicate (op0, arg1);
7314 op1 = build_builtin_expect_predicate (op1, arg1);
7315 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7317 return fold_convert (TREE_TYPE (arg0), inner);
7320 /* If the argument isn't invariant, then there's nothing else we can do. */
7321 if (!TREE_CONSTANT (arg0))
7322 return NULL_TREE;
7324 /* If we expect that a comparison against the argument will fold to
7325 a constant, return the constant.  In practice, this means a true
7326 constant or the address of a non-weak symbol. */
7327 inner = arg0;
7328 STRIP_NOPS (inner);
7329 if (TREE_CODE (inner) == ADDR_EXPR)
7333 inner = TREE_OPERAND (inner, 0);
7335 while (TREE_CODE (inner) == COMPONENT_REF
7336 || TREE_CODE (inner) == ARRAY_REF);
7337 if ((TREE_CODE (inner) == VAR_DECL
7338 || TREE_CODE (inner) == FUNCTION_DECL)
7339 && DECL_WEAK (inner))
7340 return NULL_TREE;
7343 /* Otherwise, ARG0 already has the proper type for the return value. */
7344 return arg0;
7347 /* Fold a call to __builtin_classify_type with argument ARG. */
7349 static tree
7350 fold_builtin_classify_type (tree arg)
7352 if (arg == 0)
7353 return build_int_cst (NULL_TREE, no_type_class);
7355 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7358 /* Fold a call to __builtin_strlen with argument ARG. */
7360 static tree
7361 fold_builtin_strlen (tree arg)
7363 if (!validate_arg (arg, POINTER_TYPE))
7364 return NULL_TREE;
7365 else
7367 tree len = c_strlen (arg, 0);
7369 if (len)
7371 /* Convert from the internal "sizetype" type to "size_t". */
7372 if (size_type_node)
7373 len = fold_convert (size_type_node, len);
7374 return len;
7377 return NULL_TREE;
7381 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7383 static tree
7384 fold_builtin_inf (tree type, int warn)
7386 REAL_VALUE_TYPE real;
7388 /* __builtin_inff is intended to be usable to define INFINITY on all
7389 targets. If an infinity is not available, INFINITY expands "to a
7390 positive constant of type float that overflows at translation
7391 time", footnote "In this case, using INFINITY will violate the
7392 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7393 Thus we pedwarn to ensure this constraint violation is
7394 diagnosed. */
7395 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7396 pedwarn (input_location, 0, "target format does not support infinity");
7398 real_inf (&real);
7399 return build_real (type, real);
7402 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7404 static tree
7405 fold_builtin_nan (tree arg, tree type, int quiet)
7407 REAL_VALUE_TYPE real;
7408 const char *str;
7410 if (!validate_arg (arg, POINTER_TYPE))
7411 return NULL_TREE;
7412 str = c_getstr (arg);
7413 if (!str)
7414 return NULL_TREE;
7416 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7417 return NULL_TREE;
7419 return build_real (type, real);
7422 /* Return true if the floating point expression T has an integer value.
7423 We also allow +Inf, -Inf and NaN to be considered integer values. */
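/* Illustrative examples: (double) i for an integer i, floor (x),
   and fmin (y, z) with integer-valued operands are all recognized
   below; a plain REAL_CST is checked with real_isinteger.  */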
7425 static bool
7426 integer_valued_real_p (tree t)
7428 switch (TREE_CODE (t))
7430 case FLOAT_EXPR:
7431 return true;
7433 case ABS_EXPR:
7434 case SAVE_EXPR:
7435 return integer_valued_real_p (TREE_OPERAND (t, 0));
7437 case COMPOUND_EXPR:
7438 case MODIFY_EXPR:
7439 case BIND_EXPR:
7440 return integer_valued_real_p (TREE_OPERAND (t, 1));
7442 case PLUS_EXPR:
7443 case MINUS_EXPR:
7444 case MULT_EXPR:
7445 case MIN_EXPR:
7446 case MAX_EXPR:
7447 return integer_valued_real_p (TREE_OPERAND (t, 0))
7448 && integer_valued_real_p (TREE_OPERAND (t, 1));
7450 case COND_EXPR:
7451 return integer_valued_real_p (TREE_OPERAND (t, 1))
7452 && integer_valued_real_p (TREE_OPERAND (t, 2));
7454 case REAL_CST:
7455 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7457 case NOP_EXPR:
7459 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7460 if (TREE_CODE (type) == INTEGER_TYPE)
7461 return true;
7462 if (TREE_CODE (type) == REAL_TYPE)
7463 return integer_valued_real_p (TREE_OPERAND (t, 0));
7464 break;
7467 case CALL_EXPR:
7468 switch (builtin_mathfn_code (t))
7470 CASE_FLT_FN (BUILT_IN_CEIL):
7471 CASE_FLT_FN (BUILT_IN_FLOOR):
7472 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7473 CASE_FLT_FN (BUILT_IN_RINT):
7474 CASE_FLT_FN (BUILT_IN_ROUND):
7475 CASE_FLT_FN (BUILT_IN_TRUNC):
7476 return true;
7478 CASE_FLT_FN (BUILT_IN_FMIN):
7479 CASE_FLT_FN (BUILT_IN_FMAX):
7480 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7481 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7483 default:
7484 break;
7486 break;
7488 default:
7489 break;
7491 return false;
7494 /* FNDECL is assumed to be a builtin where truncation can be propagated
7495 across (for instance floor((double)f) == (double)floorf (f)).
7496 Do the transformation for a call with argument ARG. */
7498 static tree
7499 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7501 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7503 if (!validate_arg (arg, REAL_TYPE))
7504 return NULL_TREE;
7506 /* Integer rounding functions are idempotent. */
7507 if (fcode == builtin_mathfn_code (arg))
7508 return arg;
7510 /* If argument is already integer valued, and we don't need to worry
7511 about setting errno, there's no need to perform rounding. */
7512 if (! flag_errno_math && integer_valued_real_p (arg))
7513 return arg;
7515 if (optimize)
7517 tree arg0 = strip_float_extensions (arg);
7518 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7519 tree newtype = TREE_TYPE (arg0);
7520 tree decl;
7522 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7523 && (decl = mathfn_built_in (newtype, fcode)))
7524 return fold_convert (ftype,
7525 build_call_expr (decl, 1,
7526 fold_convert (newtype, arg0)));
7528 return NULL_TREE;
7531 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7532 the argument, for instance lround((double)f) -> lroundf (f).
7533 Do the transformation for a call with argument ARG. */
7535 static tree
7536 fold_fixed_mathfn (tree fndecl, tree arg)
7538 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7540 if (!validate_arg (arg, REAL_TYPE))
7541 return NULL_TREE;
7543 /* If argument is already integer valued, and we don't need to worry
7544 about setting errno, there's no need to perform rounding. */
7545 if (! flag_errno_math && integer_valued_real_p (arg))
7546 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7548 if (optimize)
7550 tree ftype = TREE_TYPE (arg);
7551 tree arg0 = strip_float_extensions (arg);
7552 tree newtype = TREE_TYPE (arg0);
7553 tree decl;
7555 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7556 && (decl = mathfn_built_in (newtype, fcode)))
7557 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7560 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7561 sizeof (long long) == sizeof (long). */
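/* E.g. on such a target llround (x) is rewritten below as
   (long long) lround (x) (illustrative note).  */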
7562 if (TYPE_PRECISION (long_long_integer_type_node)
7563 == TYPE_PRECISION (long_integer_type_node))
7565 tree newfn = NULL_TREE;
7566 switch (fcode)
7568 CASE_FLT_FN (BUILT_IN_LLCEIL):
7569 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7570 break;
7572 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7573 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7574 break;
7576 CASE_FLT_FN (BUILT_IN_LLROUND):
7577 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7578 break;
7580 CASE_FLT_FN (BUILT_IN_LLRINT):
7581 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7582 break;
7584 default:
7585 break;
7588 if (newfn)
7590 tree newcall = build_call_expr (newfn, 1, arg);
7591 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7595 return NULL_TREE;
7598 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7599 return type. Return NULL_TREE if no simplification can be made. */
7601 static tree
7602 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7604 tree res;
7606 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7607 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7608 return NULL_TREE;
7610 /* Calculate the result when the argument is a constant. */
7611 if (TREE_CODE (arg) == COMPLEX_CST
7612 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7613 type, mpfr_hypot)))
7614 return res;
7616 if (TREE_CODE (arg) == COMPLEX_EXPR)
7618 tree real = TREE_OPERAND (arg, 0);
7619 tree imag = TREE_OPERAND (arg, 1);
7621 /* If either part is zero, cabs is fabs of the other. */
7622 if (real_zerop (real))
7623 return fold_build1 (ABS_EXPR, type, imag);
7624 if (real_zerop (imag))
7625 return fold_build1 (ABS_EXPR, type, real);
7627 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
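/* Derivation (explanatory note): |x + x*i| = sqrt (x*x + x*x)
   = fabs (x) * sqrt (2), hence the multiplication by a truncated
   sqrt(2) constant below.  */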
7628 if (flag_unsafe_math_optimizations
7629 && operand_equal_p (real, imag, OEP_PURE_SAME))
7631 const REAL_VALUE_TYPE sqrt2_trunc
7632 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7633 STRIP_NOPS (real);
7634 return fold_build2 (MULT_EXPR, type,
7635 fold_build1 (ABS_EXPR, type, real),
7636 build_real (type, sqrt2_trunc));
7640 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7641 if (TREE_CODE (arg) == NEGATE_EXPR
7642 || TREE_CODE (arg) == CONJ_EXPR)
7643 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7645 /* Don't do this when optimizing for size. */
7646 if (flag_unsafe_math_optimizations
7647 && optimize && optimize_function_for_speed_p (cfun))
7649 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7651 if (sqrtfn != NULL_TREE)
7653 tree rpart, ipart, result;
7655 arg = builtin_save_expr (arg);
7657 rpart = fold_build1 (REALPART_EXPR, type, arg);
7658 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7660 rpart = builtin_save_expr (rpart);
7661 ipart = builtin_save_expr (ipart);
7663 result = fold_build2 (PLUS_EXPR, type,
7664 fold_build2 (MULT_EXPR, type,
7665 rpart, rpart),
7666 fold_build2 (MULT_EXPR, type,
7667 ipart, ipart));
7669 return build_call_expr (sqrtfn, 1, result);
7673 return NULL_TREE;
7676 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7677 Return NULL_TREE if no simplification can be made. */
7679 static tree
7680 fold_builtin_sqrt (tree arg, tree type)
7683 enum built_in_function fcode;
7684 tree res;
7686 if (!validate_arg (arg, REAL_TYPE))
7687 return NULL_TREE;
7689 /* Calculate the result when the argument is a constant. */
7690 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7691 return res;
7693 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7694 fcode = builtin_mathfn_code (arg);
7695 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7697 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7698 arg = fold_build2 (MULT_EXPR, type,
7699 CALL_EXPR_ARG (arg, 0),
7700 build_real (type, dconsthalf));
7701 return build_call_expr (expfn, 1, arg);
7704 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7705 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7707 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7709 if (powfn)
7711 tree arg0 = CALL_EXPR_ARG (arg, 0);
7712 tree tree_root;
7713 /* The inner root was either sqrt or cbrt. */
7714 /* This was a conditional expression but it triggered a bug
7715 in Sun C 5.5. */
7716 REAL_VALUE_TYPE dconstroot;
7717 if (BUILTIN_SQRT_P (fcode))
7718 dconstroot = dconsthalf;
7719 else
7720 dconstroot = dconst_third ();
7722 /* Adjust for the outer root. */
7723 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
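/* Explanatory note: decrementing the binary exponent halves the
   value, so 1/2 becomes 1/4 and 1/3 becomes 1/6, matching
   sqrt(sqrt(x)) -> pow(x,1/4) and sqrt(cbrt(x)) -> pow(x,1/6).  */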
7724 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7725 tree_root = build_real (type, dconstroot);
7726 return build_call_expr (powfn, 2, arg0, tree_root);
7730 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7731 if (flag_unsafe_math_optimizations
7732 && (fcode == BUILT_IN_POW
7733 || fcode == BUILT_IN_POWF
7734 || fcode == BUILT_IN_POWL))
7736 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7737 tree arg0 = CALL_EXPR_ARG (arg, 0);
7738 tree arg1 = CALL_EXPR_ARG (arg, 1);
7739 tree narg1;
7740 if (!tree_expr_nonnegative_p (arg0))
7741 arg0 = build1 (ABS_EXPR, type, arg0);
7742 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7743 build_real (type, dconsthalf));
7744 return build_call_expr (powfn, 2, arg0, narg1);
7747 return NULL_TREE;
7750 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7751 Return NULL_TREE if no simplification can be made. */
7753 static tree
7754 fold_builtin_cbrt (tree arg, tree type)
7756 const enum built_in_function fcode = builtin_mathfn_code (arg);
7757 tree res;
7759 if (!validate_arg (arg, REAL_TYPE))
7760 return NULL_TREE;
7762 /* Calculate the result when the argument is a constant. */
7763 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7764 return res;
7766 if (flag_unsafe_math_optimizations)
7768 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7769 if (BUILTIN_EXPONENT_P (fcode))
7771 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7772 const REAL_VALUE_TYPE third_trunc =
7773 real_value_truncate (TYPE_MODE (type), dconst_third ());
7774 arg = fold_build2 (MULT_EXPR, type,
7775 CALL_EXPR_ARG (arg, 0),
7776 build_real (type, third_trunc));
7777 return build_call_expr (expfn, 1, arg);
7780 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7781 if (BUILTIN_SQRT_P (fcode))
7783 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7785 if (powfn)
7787 tree arg0 = CALL_EXPR_ARG (arg, 0);
7788 tree tree_root;
7789 REAL_VALUE_TYPE dconstroot = dconst_third ();
7791 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7792 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7793 tree_root = build_real (type, dconstroot);
7794 return build_call_expr (powfn, 2, arg0, tree_root);
7798 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7799 if (BUILTIN_CBRT_P (fcode))
7801 tree arg0 = CALL_EXPR_ARG (arg, 0);
7802 if (tree_expr_nonnegative_p (arg0))
7804 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7806 if (powfn)
7808 tree tree_root;
7809 REAL_VALUE_TYPE dconstroot;
7811 real_arithmetic (&dconstroot, MULT_EXPR,
7812 dconst_third_ptr (), dconst_third_ptr ());
7813 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7814 tree_root = build_real (type, dconstroot);
7815 return build_call_expr (powfn, 2, arg0, tree_root);
7820 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7821 if (fcode == BUILT_IN_POW
7822 || fcode == BUILT_IN_POWF
7823 || fcode == BUILT_IN_POWL)
7825 tree arg00 = CALL_EXPR_ARG (arg, 0);
7826 tree arg01 = CALL_EXPR_ARG (arg, 1);
7827 if (tree_expr_nonnegative_p (arg00))
7829 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7830 const REAL_VALUE_TYPE dconstroot
7831 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7832 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7833 build_real (type, dconstroot));
7834 return build_call_expr (powfn, 2, arg00, narg01);
7838 return NULL_TREE;
7841 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7842 TYPE is the type of the return value. Return NULL_TREE if no
7843 simplification can be made. */
7845 static tree
7846 fold_builtin_cos (tree arg, tree type, tree fndecl)
7848 tree res, narg;
7850 if (!validate_arg (arg, REAL_TYPE))
7851 return NULL_TREE;
7853 /* Calculate the result when the argument is a constant. */
7854 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7855 return res;
7857 /* Optimize cos(-x) into cos (x). */
7858 if ((narg = fold_strip_sign_ops (arg)))
7859 return build_call_expr (fndecl, 1, narg);
7861 return NULL_TREE;
7864 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7865 Return NULL_TREE if no simplification can be made. */
7867 static tree
7868 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7870 if (validate_arg (arg, REAL_TYPE))
7872 tree res, narg;
7874 /* Calculate the result when the argument is a constant. */
7875 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7876 return res;
7878 /* Optimize cosh(-x) into cosh (x). */
7879 if ((narg = fold_strip_sign_ops (arg)))
7880 return build_call_expr (fndecl, 1, narg);
7883 return NULL_TREE;
7886 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7887 Return NULL_TREE if no simplification can be made. */
7889 static tree
7890 fold_builtin_tan (tree arg, tree type)
7892 enum built_in_function fcode;
7893 tree res;
7895 if (!validate_arg (arg, REAL_TYPE))
7896 return NULL_TREE;
7898 /* Calculate the result when the argument is a constant. */
7899 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7900 return res;
7902 /* Optimize tan(atan(x)) = x. */
7903 fcode = builtin_mathfn_code (arg);
7904 if (flag_unsafe_math_optimizations
7905 && (fcode == BUILT_IN_ATAN
7906 || fcode == BUILT_IN_ATANF
7907 || fcode == BUILT_IN_ATANL))
7908 return CALL_EXPR_ARG (arg, 0);
7910 return NULL_TREE;
7913 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7914 NULL_TREE if no simplification can be made. */
7916 static tree
7917 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7919 tree type;
7920 tree res, fn, call;
7922 if (!validate_arg (arg0, REAL_TYPE)
7923 || !validate_arg (arg1, POINTER_TYPE)
7924 || !validate_arg (arg2, POINTER_TYPE))
7925 return NULL_TREE;
7927 type = TREE_TYPE (arg0);
7929 /* Calculate the result when the argument is a constant. */
7930 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7931 return res;
7933 /* Canonicalize sincos to cexpi. */
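/* Sketch of the rewrite built below (illustrative): sincos (x, s, c)
   becomes roughly { tmp = cexpi (x); *s = __imag tmp; *c = __real tmp; },
   relying on Euler's formula cexpi (x) = cos (x) + sin (x)*i.  */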
7934 if (!TARGET_C99_FUNCTIONS)
7935 return NULL_TREE;
7936 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7937 if (!fn)
7938 return NULL_TREE;
7940 call = build_call_expr (fn, 1, arg0);
7941 call = builtin_save_expr (call);
7943 return build2 (COMPOUND_EXPR, void_type_node,
7944 build2 (MODIFY_EXPR, void_type_node,
7945 build_fold_indirect_ref (arg1),
7946 build1 (IMAGPART_EXPR, type, call)),
7947 build2 (MODIFY_EXPR, void_type_node,
7948 build_fold_indirect_ref (arg2),
7949 build1 (REALPART_EXPR, type, call)));
7952 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7953 NULL_TREE if no simplification can be made. */
7955 static tree
7956 fold_builtin_cexp (tree arg0, tree type)
7958 tree rtype;
7959 tree realp, imagp, ifn;
7961 if (!validate_arg (arg0, COMPLEX_TYPE))
7962 return NULL_TREE;
7964 rtype = TREE_TYPE (TREE_TYPE (arg0));
7966 /* If we can figure out the real part of arg0 and it is constant zero,
7967 fold to cexpi. */
7968 if (!TARGET_C99_FUNCTIONS)
7969 return NULL_TREE;
7970 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7971 if (!ifn)
7972 return NULL_TREE;
7974 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7975 && real_zerop (realp))
7977 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7978 return build_call_expr (ifn, 1, narg);
7981 /* If we can easily decompose the real and imaginary parts, split cexp
7982 into exp (r) * cexpi (i). */
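/* Derivation (explanatory note): cexp (r + i*I)
   = exp (r) * (cos (i) + sin (i)*I) = exp (r) * cexpi (i),
   which is the form built below.  */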
7983 if (flag_unsafe_math_optimizations
7984 && realp)
7986 tree rfn, rcall, icall;
7988 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7989 if (!rfn)
7990 return NULL_TREE;
7992 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7993 if (!imagp)
7994 return NULL_TREE;
7996 icall = build_call_expr (ifn, 1, imagp);
7997 icall = builtin_save_expr (icall);
7998 rcall = build_call_expr (rfn, 1, realp);
7999 rcall = builtin_save_expr (rcall);
8000 return fold_build2 (COMPLEX_EXPR, type,
8001 fold_build2 (MULT_EXPR, rtype,
8002 rcall,
8003 fold_build1 (REALPART_EXPR, rtype, icall)),
8004 fold_build2 (MULT_EXPR, rtype,
8005 rcall,
8006 fold_build1 (IMAGPART_EXPR, rtype, icall)));
8009 return NULL_TREE;
8012 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8013 Return NULL_TREE if no simplification can be made. */
8015 static tree
8016 fold_builtin_trunc (tree fndecl, tree arg)
8018 if (!validate_arg (arg, REAL_TYPE))
8019 return NULL_TREE;
8021 /* Optimize trunc of constant value. */
8022 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8024 REAL_VALUE_TYPE r, x;
8025 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8027 x = TREE_REAL_CST (arg);
8028 real_trunc (&r, TYPE_MODE (type), &x);
8029 return build_real (type, r);
8032 return fold_trunc_transparent_mathfn (fndecl, arg);
8035 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8036 Return NULL_TREE if no simplification can be made. */
8038 static tree
8039 fold_builtin_floor (tree fndecl, tree arg)
8041 if (!validate_arg (arg, REAL_TYPE))
8042 return NULL_TREE;
8044 /* Optimize floor of constant value. */
8045 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8047 REAL_VALUE_TYPE x;
8049 x = TREE_REAL_CST (arg);
8050 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8052 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8053 REAL_VALUE_TYPE r;
8055 real_floor (&r, TYPE_MODE (type), &x);
8056 return build_real (type, r);
8060 /* Fold floor (x) where x is nonnegative to trunc (x). */
8061 if (tree_expr_nonnegative_p (arg))
8063 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8064 if (truncfn)
8065 return build_call_expr (truncfn, 1, arg);
8068 return fold_trunc_transparent_mathfn (fndecl, arg);
8071 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8072 Return NULL_TREE if no simplification can be made. */
8074 static tree
8075 fold_builtin_ceil (tree fndecl, tree arg)
8077 if (!validate_arg (arg, REAL_TYPE))
8078 return NULL_TREE;
8080 /* Optimize ceil of constant value. */
8081 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8083 REAL_VALUE_TYPE x;
8085 x = TREE_REAL_CST (arg);
8086 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8088 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8089 REAL_VALUE_TYPE r;
8091 real_ceil (&r, TYPE_MODE (type), &x);
8092 return build_real (type, r);
8096 return fold_trunc_transparent_mathfn (fndecl, arg);
8099 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8100 Return NULL_TREE if no simplification can be made. */
8102 static tree
8103 fold_builtin_round (tree fndecl, tree arg)
8105 if (!validate_arg (arg, REAL_TYPE))
8106 return NULL_TREE;
8108 /* Optimize round of constant value. */
8109 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8111 REAL_VALUE_TYPE x;
8113 x = TREE_REAL_CST (arg);
8114 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8116 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8117 REAL_VALUE_TYPE r;
8119 real_round (&r, TYPE_MODE (type), &x);
8120 return build_real (type, r);
8124 return fold_trunc_transparent_mathfn (fndecl, arg);
8127 /* Fold function call to builtin lround, lroundf or lroundl (or the
8128 corresponding long long versions) and other rounding functions. ARG
8129 is the argument to the call. Return NULL_TREE if no simplification
8130 can be made. */
8132 static tree
8133 fold_builtin_int_roundingfn (tree fndecl, tree arg)
8135 if (!validate_arg (arg, REAL_TYPE))
8136 return NULL_TREE;
8138 /* Optimize lround of constant value. */
8139 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8141 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8143 if (real_isfinite (&x))
8145 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8146 tree ftype = TREE_TYPE (arg);
8147 unsigned HOST_WIDE_INT lo2;
8148 HOST_WIDE_INT hi, lo;
8149 REAL_VALUE_TYPE r;
8151 switch (DECL_FUNCTION_CODE (fndecl))
8153 CASE_FLT_FN (BUILT_IN_LFLOOR):
8154 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8155 real_floor (&r, TYPE_MODE (ftype), &x);
8156 break;
8158 CASE_FLT_FN (BUILT_IN_LCEIL):
8159 CASE_FLT_FN (BUILT_IN_LLCEIL):
8160 real_ceil (&r, TYPE_MODE (ftype), &x);
8161 break;
8163 CASE_FLT_FN (BUILT_IN_LROUND):
8164 CASE_FLT_FN (BUILT_IN_LLROUND):
8165 real_round (&r, TYPE_MODE (ftype), &x);
8166 break;
8168 default:
8169 gcc_unreachable ();
8172 REAL_VALUE_TO_INT (&lo, &hi, r);
8173 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8174 return build_int_cst_wide (itype, lo2, hi);
8178 switch (DECL_FUNCTION_CODE (fndecl))
8180 CASE_FLT_FN (BUILT_IN_LFLOOR):
8181 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8182 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8183 if (tree_expr_nonnegative_p (arg))
8184 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8185 arg);
8186 break;
8187 default:;
8190 return fold_fixed_mathfn (fndecl, arg);
8193 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8194 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8195 the argument to the call. Return NULL_TREE if no simplification can
8196 be made. */
8198 static tree
8199 fold_builtin_bitop (tree fndecl, tree arg)
8201 if (!validate_arg (arg, INTEGER_TYPE))
8202 return NULL_TREE;
8204 /* Optimize for constant argument. */
8205 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8207 HOST_WIDE_INT hi, width, result;
8208 unsigned HOST_WIDE_INT lo;
8209 tree type;
8211 type = TREE_TYPE (arg);
8212 width = TYPE_PRECISION (type);
8213 lo = TREE_INT_CST_LOW (arg);
8215 /* Clear all the bits that are beyond the type's precision. */
8216 if (width > HOST_BITS_PER_WIDE_INT)
8218 hi = TREE_INT_CST_HIGH (arg);
8219 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8220 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8222 else
8224 hi = 0;
8225 if (width < HOST_BITS_PER_WIDE_INT)
8226 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8229 switch (DECL_FUNCTION_CODE (fndecl))
8231 CASE_INT_FN (BUILT_IN_FFS):
8232 if (lo != 0)
8233 result = exact_log2 (lo & -lo) + 1;
8234 else if (hi != 0)
8235 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8236 else
8237 result = 0;
8238 break;
8240 CASE_INT_FN (BUILT_IN_CLZ):
8241 if (hi != 0)
8242 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8243 else if (lo != 0)
8244 result = width - floor_log2 (lo) - 1;
8245 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8246 result = width;
8247 break;
8249 CASE_INT_FN (BUILT_IN_CTZ):
8250 if (lo != 0)
8251 result = exact_log2 (lo & -lo);
8252 else if (hi != 0)
8253 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8254 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8255 result = width;
8256 break;
8258 CASE_INT_FN (BUILT_IN_POPCOUNT):
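/* Explanatory note: the loops below use the classic trick that
   x &= x - 1 clears the lowest set bit, so each iteration counts
   one set bit.  */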
8259 result = 0;
8260 while (lo)
8261 result++, lo &= lo - 1;
8262 while (hi)
8263 result++, hi &= hi - 1;
8264 break;
8266 CASE_INT_FN (BUILT_IN_PARITY):
8267 result = 0;
8268 while (lo)
8269 result++, lo &= lo - 1;
8270 while (hi)
8271 result++, hi &= hi - 1;
8272 result &= 1;
8273 break;
8275 default:
8276 gcc_unreachable ();
8279 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8282 return NULL_TREE;
8285 /* Fold function call to builtin_bswap and the long and long long
8286 variants. Return NULL_TREE if no simplification can be made. */
8287 static tree
8288 fold_builtin_bswap (tree fndecl, tree arg)
8290 if (! validate_arg (arg, INTEGER_TYPE))
8291 return NULL_TREE;
8293 /* Optimize constant value. */
8294 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8296 HOST_WIDE_INT hi, width, r_hi = 0;
8297 unsigned HOST_WIDE_INT lo, r_lo = 0;
8298 tree type;
8300 type = TREE_TYPE (arg);
8301 width = TYPE_PRECISION (type);
8302 lo = TREE_INT_CST_LOW (arg);
8303 hi = TREE_INT_CST_HIGH (arg);
8305 switch (DECL_FUNCTION_CODE (fndecl))
8307 case BUILT_IN_BSWAP32:
8308 case BUILT_IN_BSWAP64:
8310 int s;
8312 for (s = 0; s < width; s += 8)
8314 int d = width - s - 8;
8315 unsigned HOST_WIDE_INT byte;
8317 if (s < HOST_BITS_PER_WIDE_INT)
8318 byte = (lo >> s) & 0xff;
8319 else
8320 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8322 if (d < HOST_BITS_PER_WIDE_INT)
8323 r_lo |= byte << d;
8324 else
8325 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8329 break;
8331 default:
8332 gcc_unreachable ();
8335 if (width < HOST_BITS_PER_WIDE_INT)
8336 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8337 else
8338 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8341 return NULL_TREE;
8344 /* A subroutine of fold_builtin to fold the various logarithmic
8345 functions.  Return NULL_TREE if no simplification can be made.
8346 FUNC is the corresponding MPFR logarithm function. */
8348 static tree
8349 fold_builtin_logarithm (tree fndecl, tree arg,
8350 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8352 if (validate_arg (arg, REAL_TYPE))
8354 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8355 tree res;
8356 const enum built_in_function fcode = builtin_mathfn_code (arg);
8358 /* Calculate the result when the argument is a constant. */
8359 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8360 return res;
8362 /* Special case, optimize logN(expN(x)) = x. */
8363 if (flag_unsafe_math_optimizations
8364 && ((func == mpfr_log
8365 && (fcode == BUILT_IN_EXP
8366 || fcode == BUILT_IN_EXPF
8367 || fcode == BUILT_IN_EXPL))
8368 || (func == mpfr_log2
8369 && (fcode == BUILT_IN_EXP2
8370 || fcode == BUILT_IN_EXP2F
8371 || fcode == BUILT_IN_EXP2L))
8372 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8373 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8375 /* Optimize logN(func()) for various exponential functions. We
8376 want to determine the value "x" and the power "exponent" in
8377 order to transform logN(x**exponent) into exponent*logN(x). */
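/* For example (illustrative): log (pow (x, y)) -> y * log (x) and
   log2 (sqrt (x)) -> 0.5 * log2 (x), guarded by
   -funsafe-math-optimizations.  */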
8378 if (flag_unsafe_math_optimizations)
8380 tree exponent = 0, x = 0;
8382 switch (fcode)
8384 CASE_FLT_FN (BUILT_IN_EXP):
8385 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8386 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8387 dconst_e ()));
8388 exponent = CALL_EXPR_ARG (arg, 0);
8389 break;
8390 CASE_FLT_FN (BUILT_IN_EXP2):
8391 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8392 x = build_real (type, dconst2);
8393 exponent = CALL_EXPR_ARG (arg, 0);
8394 break;
8395 CASE_FLT_FN (BUILT_IN_EXP10):
8396 CASE_FLT_FN (BUILT_IN_POW10):
8397 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8399 REAL_VALUE_TYPE dconst10;
8400 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8401 x = build_real (type, dconst10);
8403 exponent = CALL_EXPR_ARG (arg, 0);
8404 break;
8405 CASE_FLT_FN (BUILT_IN_SQRT):
8406 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8407 x = CALL_EXPR_ARG (arg, 0);
8408 exponent = build_real (type, dconsthalf);
8409 break;
8410 CASE_FLT_FN (BUILT_IN_CBRT):
8411 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8412 x = CALL_EXPR_ARG (arg, 0);
8413 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8414 dconst_third ()));
8415 break;
8416 CASE_FLT_FN (BUILT_IN_POW):
8417 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8418 x = CALL_EXPR_ARG (arg, 0);
8419 exponent = CALL_EXPR_ARG (arg, 1);
8420 break;
8421 default:
8422 break;
8425 /* Now perform the optimization. */
8426 if (x && exponent)
8428 tree logfn = build_call_expr (fndecl, 1, x);
8429 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8434 return NULL_TREE;
8437 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8438 NULL_TREE if no simplification can be made. */
8440 static tree
8441 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8443 tree res, narg0, narg1;
8445 if (!validate_arg (arg0, REAL_TYPE)
8446 || !validate_arg (arg1, REAL_TYPE))
8447 return NULL_TREE;
8449 /* Calculate the result when the argument is a constant. */
8450 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8451 return res;
8453 /* If either argument to hypot has a negate or abs, strip that off.
8454 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8455 narg0 = fold_strip_sign_ops (arg0);
8456 narg1 = fold_strip_sign_ops (arg1);
8457 if (narg0 || narg1)
8459 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8460 narg1 ? narg1 : arg1);
8463 /* If either argument is zero, hypot is fabs of the other. */
8464 if (real_zerop (arg0))
8465 return fold_build1 (ABS_EXPR, type, arg1);
8466 else if (real_zerop (arg1))
8467 return fold_build1 (ABS_EXPR, type, arg0);
8469 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8470 if (flag_unsafe_math_optimizations
8471 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8473 const REAL_VALUE_TYPE sqrt2_trunc
8474 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8475 return fold_build2 (MULT_EXPR, type,
8476 fold_build1 (ABS_EXPR, type, arg0),
8477 build_real (type, sqrt2_trunc));
8480 return NULL_TREE;
8484 /* Fold a builtin function call to pow, powf, or powl. Return
8485 NULL_TREE if no simplification can be made. */
8486 static tree
8487 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8489 tree res;
8491 if (!validate_arg (arg0, REAL_TYPE)
8492 || !validate_arg (arg1, REAL_TYPE))
8493 return NULL_TREE;
8495 /* Calculate the result when the argument is a constant. */
8496 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8497 return res;
8499 /* Optimize pow(1.0,y) = 1.0. */
8500 if (real_onep (arg0))
8501 return omit_one_operand (type, build_real (type, dconst1), arg1);
8503 if (TREE_CODE (arg1) == REAL_CST
8504 && !TREE_OVERFLOW (arg1))
8506 REAL_VALUE_TYPE cint;
8507 REAL_VALUE_TYPE c;
8508 HOST_WIDE_INT n;
8510 c = TREE_REAL_CST (arg1);
8512 /* Optimize pow(x,0.0) = 1.0. */
8513 if (REAL_VALUES_EQUAL (c, dconst0))
8514 return omit_one_operand (type, build_real (type, dconst1),
8515 arg0);
8517 /* Optimize pow(x,1.0) = x. */
8518 if (REAL_VALUES_EQUAL (c, dconst1))
8519 return arg0;
8521 /* Optimize pow(x,-1.0) = 1.0/x. */
8522 if (REAL_VALUES_EQUAL (c, dconstm1))
8523 return fold_build2 (RDIV_EXPR, type,
8524 build_real (type, dconst1), arg0);
8526 /* Optimize pow(x,0.5) = sqrt(x). */
8527 if (flag_unsafe_math_optimizations
8528 && REAL_VALUES_EQUAL (c, dconsthalf))
8530 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8532 if (sqrtfn != NULL_TREE)
8533 return build_call_expr (sqrtfn, 1, arg0);
8536 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8537 if (flag_unsafe_math_optimizations)
8539 const REAL_VALUE_TYPE dconstroot
8540 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8542 if (REAL_VALUES_EQUAL (c, dconstroot))
8544 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8545 if (cbrtfn != NULL_TREE)
8546 return build_call_expr (cbrtfn, 1, arg0);
8550 /* Check for an integer exponent. */
8551 n = real_to_integer (&c);
8552 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8553 if (real_identical (&c, &cint))
8555 /* Attempt to evaluate pow at compile-time, unless this should
8556 raise an exception. */
8557 if (TREE_CODE (arg0) == REAL_CST
8558 && !TREE_OVERFLOW (arg0)
8559 && (n > 0
8560 || (!flag_trapping_math && !flag_errno_math)
8561 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8563 REAL_VALUE_TYPE x;
8564 bool inexact;
8566 x = TREE_REAL_CST (arg0);
8567 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8568 if (flag_unsafe_math_optimizations || !inexact)
8569 return build_real (type, x);
8572 /* Strip sign ops from even integer powers. */
8573 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8575 tree narg0 = fold_strip_sign_ops (arg0);
8576 if (narg0)
8577 return build_call_expr (fndecl, 2, narg0, arg1);
8582 if (flag_unsafe_math_optimizations)
8584 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8586 /* Optimize pow(expN(x),y) = expN(x*y). */
8587 if (BUILTIN_EXPONENT_P (fcode))
8589 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8590 tree arg = CALL_EXPR_ARG (arg0, 0);
8591 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8592 return build_call_expr (expfn, 1, arg);
8595 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8596 if (BUILTIN_SQRT_P (fcode))
8598 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8599 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8600 build_real (type, dconsthalf));
8601 return build_call_expr (fndecl, 2, narg0, narg1);
8604 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8605 if (BUILTIN_CBRT_P (fcode))
8607 tree arg = CALL_EXPR_ARG (arg0, 0);
8608 if (tree_expr_nonnegative_p (arg))
8610 const REAL_VALUE_TYPE dconstroot
8611 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8612 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8613 build_real (type, dconstroot));
8614 return build_call_expr (fndecl, 2, arg, narg1);
8618 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8619 if (fcode == BUILT_IN_POW
8620 || fcode == BUILT_IN_POWF
8621 || fcode == BUILT_IN_POWL)
8623 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8624 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8625 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8626 return build_call_expr (fndecl, 2, arg00, narg1);
8630 return NULL_TREE;
8633 /* Fold a builtin function call to powi, powif, or powil with arguments ARG0 and ARG1.
8634 Return NULL_TREE if no simplification can be made. */
8635 static tree
8636 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8637 tree arg0, tree arg1, tree type)
8639 if (!validate_arg (arg0, REAL_TYPE)
8640 || !validate_arg (arg1, INTEGER_TYPE))
8641 return NULL_TREE;
8643 /* Optimize pow(1.0,y) = 1.0. */
8644 if (real_onep (arg0))
8645 return omit_one_operand (type, build_real (type, dconst1), arg1);
8647 if (host_integerp (arg1, 0))
8649 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8651 /* Evaluate powi at compile-time. */
8652 if (TREE_CODE (arg0) == REAL_CST
8653 && !TREE_OVERFLOW (arg0))
8655 REAL_VALUE_TYPE x;
8656 x = TREE_REAL_CST (arg0);
8657 real_powi (&x, TYPE_MODE (type), &x, c);
8658 return build_real (type, x);
8661 /* Optimize pow(x,0) = 1.0. */
8662 if (c == 0)
8663 return omit_one_operand (type, build_real (type, dconst1),
8664 arg0);
8666 /* Optimize pow(x,1) = x. */
8667 if (c == 1)
8668 return arg0;
8670 /* Optimize pow(x,-1) = 1.0/x. */
8671 if (c == -1)
8672 return fold_build2 (RDIV_EXPR, type,
8673 build_real (type, dconst1), arg0);
8676 return NULL_TREE;
8679 /* A subroutine of fold_builtin to fold the various exponent
8680 functions. Return NULL_TREE if no simplification can be made.
8681 FUNC is the corresponding MPFR exponent function. */
8683 static tree
8684 fold_builtin_exponent (tree fndecl, tree arg,
8685 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8687 if (validate_arg (arg, REAL_TYPE))
8689 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8690 tree res;
8692 /* Calculate the result when the argument is a constant. */
8693 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8694 return res;
8696 /* Optimize expN(logN(x)) = x. */
8697 if (flag_unsafe_math_optimizations)
8699 const enum built_in_function fcode = builtin_mathfn_code (arg);
8701 if ((func == mpfr_exp
8702 && (fcode == BUILT_IN_LOG
8703 || fcode == BUILT_IN_LOGF
8704 || fcode == BUILT_IN_LOGL))
8705 || (func == mpfr_exp2
8706 && (fcode == BUILT_IN_LOG2
8707 || fcode == BUILT_IN_LOG2F
8708 || fcode == BUILT_IN_LOG2L))
8709 || (func == mpfr_exp10
8710 && (fcode == BUILT_IN_LOG10
8711 || fcode == BUILT_IN_LOG10F
8712 || fcode == BUILT_IN_LOG10L)))
8713 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8717 return NULL_TREE;
8720 /* Return true if VAR is a VAR_DECL or a component thereof. */
8722 static bool
8723 var_decl_component_p (tree var)
8725 tree inner = var;
8726 while (handled_component_p (inner))
8727 inner = TREE_OPERAND (inner, 0);
8728 return SSA_VAR_P (inner);
8731 /* Fold function call to builtin memset. Return
8732 NULL_TREE if no simplification can be made. */
8734 static tree
8735 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8737 tree var, ret;
8738 unsigned HOST_WIDE_INT length, cval;
8740 if (! validate_arg (dest, POINTER_TYPE)
8741 || ! validate_arg (c, INTEGER_TYPE)
8742 || ! validate_arg (len, INTEGER_TYPE))
8743 return NULL_TREE;
8745 if (! host_integerp (len, 1))
8746 return NULL_TREE;
8748 /* If the LEN parameter is zero, return DEST. */
8749 if (integer_zerop (len))
8750 return omit_one_operand (type, dest, c);
8752 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8753 return NULL_TREE;
8755 var = dest;
8756 STRIP_NOPS (var);
8757 if (TREE_CODE (var) != ADDR_EXPR)
8758 return NULL_TREE;
8760 var = TREE_OPERAND (var, 0);
8761 if (TREE_THIS_VOLATILE (var))
8762 return NULL_TREE;
8764 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8765 && !POINTER_TYPE_P (TREE_TYPE (var)))
8766 return NULL_TREE;
8768 if (! var_decl_component_p (var))
8769 return NULL_TREE;
8771 length = tree_low_cst (len, 1);
8772 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8773 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8774 < (int) length)
8775 return NULL_TREE;
8777 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8778 return NULL_TREE;
8780 if (integer_zerop (c))
8781 cval = 0;
8782 else
8784 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8785 return NULL_TREE;
8787 cval = tree_low_cst (c, 1);
8788 cval &= 0xff;
8789 cval |= cval << 8;
8790 cval |= cval << 16;
8791 cval |= (cval << 31) << 1;
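/* Explanatory note: the byte value is replicated into every byte of
   the word (e.g. 0xab -> 0xabababab...).  The final shift is written
   as (cval << 31) << 1 rather than cval << 32 so the shift count
   stays valid when HOST_WIDE_INT is only 32 bits wide.  */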
8794 ret = build_int_cst_type (TREE_TYPE (var), cval);
8795 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8796 if (ignore)
8797 return ret;
8799 return omit_one_operand (type, dest, ret);
8802 /* Fold function call to builtin bzero. Return
8803 NULL_TREE if no simplification can be made. */
8805 static tree
8806 fold_builtin_bzero (tree dest, tree size, bool ignore)
8808 if (! validate_arg (dest, POINTER_TYPE)
8809 || ! validate_arg (size, INTEGER_TYPE))
8810 return NULL_TREE;
8812 if (!ignore)
8813 return NULL_TREE;
8815 /* New argument list transforming bzero(ptr x, int y) to
8816 memset(ptr x, int 0, size_t y). This is done this way
8817 so that if it isn't expanded inline, we fall back to
8818 calling bzero instead of memset. */
8820 return fold_builtin_memset (dest, integer_zero_node,
8821 fold_convert (sizetype, size),
8822 void_type_node, ignore);
8825 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8826 NULL_TREE if no simplification can be made.
8827 If ENDP is 0, return DEST (like memcpy).
8828 If ENDP is 1, return DEST+LEN (like mempcpy).
8829 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8830 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8831 (memmove). */
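/* For instance (illustrative): mempcpy (d, s, n) folds to the copy
   followed by d + n, while the stpcpy-style ENDP == 2 callers get
   d + n - 1.  */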
8833 static tree
8834 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8836 tree destvar, srcvar, expr;
8838 if (! validate_arg (dest, POINTER_TYPE)
8839 || ! validate_arg (src, POINTER_TYPE)
8840 || ! validate_arg (len, INTEGER_TYPE))
8841 return NULL_TREE;
8843 /* If the LEN parameter is zero, return DEST. */
8844 if (integer_zerop (len))
8845 return omit_one_operand (type, dest, src);
8847 /* If SRC and DEST are the same (and not volatile), return
8848 DEST{,+LEN,+LEN-1}. */
8849 if (operand_equal_p (src, dest, 0))
8850 expr = len;
8851 else
8853 tree srctype, desttype;
8854 int src_align, dest_align;
8856 if (endp == 3)
8858 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8859 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8861 /* Both DEST and SRC must be pointer types.
8862 ??? This is what old code did. Is the testing for pointer types
8863 really mandatory?
8865 If either SRC is readonly or length is 1, we can use memcpy. */
8866 if (!dest_align || !src_align)
8867 return NULL_TREE;
8868 if (readonly_data_expr (src)
8869 || (host_integerp (len, 1)
8870 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8871 >= tree_low_cst (len, 1))))
8873 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8874 if (!fn)
8875 return NULL_TREE;
8876 return build_call_expr (fn, 3, dest, src, len);
8879 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8880 srcvar = build_fold_indirect_ref (src);
8881 destvar = build_fold_indirect_ref (dest);
8882 if (srcvar
8883 && !TREE_THIS_VOLATILE (srcvar)
8884 && destvar
8885 && !TREE_THIS_VOLATILE (destvar))
8887 tree src_base, dest_base, fn;
8888 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8889 HOST_WIDE_INT size = -1;
8890 HOST_WIDE_INT maxsize = -1;
8892 src_base = srcvar;
8893 if (handled_component_p (src_base))
8894 src_base = get_ref_base_and_extent (src_base, &src_offset,
8895 &size, &maxsize);
8896 dest_base = destvar;
8897 if (handled_component_p (dest_base))
8898 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8899 &size, &maxsize);
8900 if (host_integerp (len, 1))
8902 maxsize = tree_low_cst (len, 1);
8903 if (maxsize
8904 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8905 maxsize = -1;
8906 else
8907 maxsize *= BITS_PER_UNIT;
8909 else
8910 maxsize = -1;
8911 if (SSA_VAR_P (src_base)
8912 && SSA_VAR_P (dest_base))
8914 if (operand_equal_p (src_base, dest_base, 0)
8915 && ranges_overlap_p (src_offset, maxsize,
8916 dest_offset, maxsize))
8917 return NULL_TREE;
8919 else if (TREE_CODE (src_base) == INDIRECT_REF
8920 && TREE_CODE (dest_base) == INDIRECT_REF)
8922 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8923 TREE_OPERAND (dest_base, 0), 0)
8924 || ranges_overlap_p (src_offset, maxsize,
8925 dest_offset, maxsize))
8926 return NULL_TREE;
8928 else
8929 return NULL_TREE;
8931 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8932 if (!fn)
8933 return NULL_TREE;
8934 return build_call_expr (fn, 3, dest, src, len);
8936 return NULL_TREE;
8939 if (!host_integerp (len, 0))
8940 return NULL_TREE;
8941 /* FIXME:
8942 This logic loses for arguments like (type *)malloc (sizeof (type)),
8943 since we strip the casts down to the VOID return value of malloc.
8944 Perhaps we ought to inherit the type from the non-VOID argument here? */
8945 STRIP_NOPS (src);
8946 STRIP_NOPS (dest);
8947 srctype = TREE_TYPE (TREE_TYPE (src));
8948 desttype = TREE_TYPE (TREE_TYPE (dest));
8949 if (!srctype || !desttype
8950 || !TYPE_SIZE_UNIT (srctype)
8951 || !TYPE_SIZE_UNIT (desttype)
8952 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8953 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8954 || TYPE_VOLATILE (srctype)
8955 || TYPE_VOLATILE (desttype))
8956 return NULL_TREE;
8958 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8959 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8960 if (dest_align < (int) TYPE_ALIGN (desttype)
8961 || src_align < (int) TYPE_ALIGN (srctype))
8962 return NULL_TREE;
8964 if (!ignore)
8965 dest = builtin_save_expr (dest);
8967 srcvar = NULL_TREE;
8968 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8970 srcvar = build_fold_indirect_ref (src);
8971 if (TREE_THIS_VOLATILE (srcvar))
8972 return NULL_TREE;
8973 else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8974 srcvar = NULL_TREE;
8975 /* With memcpy, it is possible to bypass aliasing rules, so without
8976 this check e.g. execute/20060930-2.c would be misoptimized,
8977 because it uses a conflicting alias set to hold the argument for the
8978 memcpy call. This check is probably unnecessary with
8979 -fno-strict-aliasing. Similarly for destvar. See also
8980 PR29286. */
8981 else if (!var_decl_component_p (srcvar))
8982 srcvar = NULL_TREE;
8985 destvar = NULL_TREE;
8986 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8988 destvar = build_fold_indirect_ref (dest);
8989 if (TREE_THIS_VOLATILE (destvar))
8990 return NULL_TREE;
8991 else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8992 destvar = NULL_TREE;
8993 else if (!var_decl_component_p (destvar))
8994 destvar = NULL_TREE;
8997 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8998 return NULL_TREE;
9000 if (srcvar == NULL_TREE)
9002 tree srcptype;
9003 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
9004 return NULL_TREE;
9006 srctype = build_qualified_type (desttype, 0);
9007 if (src_align < (int) TYPE_ALIGN (srctype))
9009 if (AGGREGATE_TYPE_P (srctype)
9010 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
9011 return NULL_TREE;
9013 srctype = build_variant_type_copy (srctype);
9014 TYPE_ALIGN (srctype) = src_align;
9015 TYPE_USER_ALIGN (srctype) = 1;
9016 TYPE_PACKED (srctype) = 1;
9018 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
9019 src = fold_convert (srcptype, src);
9020 srcvar = build_fold_indirect_ref (src);
9022 else if (destvar == NULL_TREE)
9024 tree destptype;
9025 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
9026 return NULL_TREE;
9028 desttype = build_qualified_type (srctype, 0);
9029 if (dest_align < (int) TYPE_ALIGN (desttype))
9031 if (AGGREGATE_TYPE_P (desttype)
9032 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
9033 return NULL_TREE;
9035 desttype = build_variant_type_copy (desttype);
9036 TYPE_ALIGN (desttype) = dest_align;
9037 TYPE_USER_ALIGN (desttype) = 1;
9038 TYPE_PACKED (desttype) = 1;
9040 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
9041 dest = fold_convert (destptype, dest);
9042 destvar = build_fold_indirect_ref (dest);
9045 if (srctype == desttype
9046 || (gimple_in_ssa_p (cfun)
9047 && useless_type_conversion_p (desttype, srctype)))
9048 expr = srcvar;
9049 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
9050 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
9051 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
9052 || POINTER_TYPE_P (TREE_TYPE (destvar))))
9053 expr = fold_convert (TREE_TYPE (destvar), srcvar);
9054 else
9055 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
9056 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
9059 if (ignore)
9060 return expr;
9062 if (endp == 0 || endp == 3)
9063 return omit_one_operand (type, dest, expr);
9065 if (expr == len)
9066 expr = NULL_TREE;
9068 if (endp == 2)
9069 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
9070 ssize_int (1));
9072 len = fold_convert (sizetype, len);
9073 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
9074 dest = fold_convert (type, dest);
9075 if (expr)
9076 dest = omit_one_operand (type, dest, expr);
9077 return dest;
9080 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9081 If LEN is not NULL, it represents the length of the string to be
9082 copied. Return NULL_TREE if no simplification can be made. */
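/* E.g. (illustrative) strcpy (d, "abc") becomes memcpy (d, "abc", 4),
   copying the terminating NUL as well.  */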
9084 tree
9085 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
9087 tree fn;
9089 if (!validate_arg (dest, POINTER_TYPE)
9090 || !validate_arg (src, POINTER_TYPE))
9091 return NULL_TREE;
9093 /* If SRC and DEST are the same (and not volatile), return DEST. */
9094 if (operand_equal_p (src, dest, 0))
9095 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
9097 if (optimize_function_for_size_p (cfun))
9098 return NULL_TREE;
9100 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9101 if (!fn)
9102 return NULL_TREE;
9104 if (!len)
9106 len = c_strlen (src, 1);
9107 if (! len || TREE_SIDE_EFFECTS (len))
9108 return NULL_TREE;
9111 len = size_binop (PLUS_EXPR, len, ssize_int (1));
9112 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9113 build_call_expr (fn, 3, dest, src, len));
9116 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9117 If SLEN is not NULL, it represents the length of the source string.
9118 Return NULL_TREE if no simplification can be made. */
9120 tree
9121 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
9123 tree fn;
9125 if (!validate_arg (dest, POINTER_TYPE)
9126 || !validate_arg (src, POINTER_TYPE)
9127 || !validate_arg (len, INTEGER_TYPE))
9128 return NULL_TREE;
9130 /* If the LEN parameter is zero, return DEST. */
9131 if (integer_zerop (len))
9132 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9134 /* We can't compare slen with len as constants below if len is not a
9135 constant. */
9136 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9137 return NULL_TREE;
9139 if (!slen)
9140 slen = c_strlen (src, 1);
9142 /* Now, we must be passed a constant src ptr parameter. */
9143 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9144 return NULL_TREE;
9146 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
9148 /* We do not support simplification of this case, though we do
9149 support it when expanding trees into RTL. */
9150 /* FIXME: generate a call to __builtin_memset. */
9151 if (tree_int_cst_lt (slen, len))
9152 return NULL_TREE;
9154 /* OK, transform into builtin memcpy. */
9155 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9156 if (!fn)
9157 return NULL_TREE;
9158 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9159 build_call_expr (fn, 3, dest, src, len));
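/* Editorial illustration, not part of the original source.  "dst" is a
   hypothetical buffer.  The folding above only fires when the bound is
   not larger than the source length plus its terminating NUL:

     strncpy (dst, "abcdef", 4)    ->    memcpy (dst, "abcdef", 4)

   while strncpy (dst, "ab", 8) is left alone, since folding it would
   also require the memset-style zero padding noted in the FIXME.  */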
9162 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9163 arguments to the call, and TYPE is its return type.
9164 Return NULL_TREE if no simplification can be made. */
9166 static tree
9167 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
9169 if (!validate_arg (arg1, POINTER_TYPE)
9170 || !validate_arg (arg2, INTEGER_TYPE)
9171 || !validate_arg (len, INTEGER_TYPE))
9172 return NULL_TREE;
9173 else
9175 const char *p1;
9177 if (TREE_CODE (arg2) != INTEGER_CST
9178 || !host_integerp (len, 1))
9179 return NULL_TREE;
9181 p1 = c_getstr (arg1);
9182 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9184 char c;
9185 const char *r;
9186 tree tem;
9188 if (target_char_cast (arg2, &c))
9189 return NULL_TREE;
9191 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9193 if (r == NULL)
9194 return build_int_cst (TREE_TYPE (arg1), 0);
9196 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9197 size_int (r - p1));
9198 return fold_convert (type, tem);
9200 return NULL_TREE;
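/* Editorial illustration, not part of the original source.  With a
   constant haystack and an in-range constant length,

     memchr ("hello", 'l', 5)    ->    "hello" + 2

   i.e. a POINTER_PLUS_EXPR offsetting the original pointer by the
   position of the first match, converted to the call's return type; if
   no match is found within LEN the call folds to a null pointer.  */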
9204 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9205 Return NULL_TREE if no simplification can be made. */
9207 static tree
9208 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
9210 const char *p1, *p2;
9212 if (!validate_arg (arg1, POINTER_TYPE)
9213 || !validate_arg (arg2, POINTER_TYPE)
9214 || !validate_arg (len, INTEGER_TYPE))
9215 return NULL_TREE;
9217 /* If the LEN parameter is zero, return zero. */
9218 if (integer_zerop (len))
9219 return omit_two_operands (integer_type_node, integer_zero_node,
9220 arg1, arg2);
9222 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9223 if (operand_equal_p (arg1, arg2, 0))
9224 return omit_one_operand (integer_type_node, integer_zero_node, len);
9226 p1 = c_getstr (arg1);
9227 p2 = c_getstr (arg2);
9229 /* If all arguments are constant, and the value of len is not greater
9230 than the lengths of arg1 and arg2, evaluate at compile-time. */
9231 if (host_integerp (len, 1) && p1 && p2
9232 && compare_tree_int (len, strlen (p1) + 1) <= 0
9233 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9235 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9237 if (r > 0)
9238 return integer_one_node;
9239 else if (r < 0)
9240 return integer_minus_one_node;
9241 else
9242 return integer_zero_node;
9245 /* If the LEN parameter is one, return an expression corresponding to
9246 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9247 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9249 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9250 tree cst_uchar_ptr_node
9251 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9253 tree ind1 = fold_convert (integer_type_node,
9254 build1 (INDIRECT_REF, cst_uchar_node,
9255 fold_convert (cst_uchar_ptr_node,
9256 arg1)));
9257 tree ind2 = fold_convert (integer_type_node,
9258 build1 (INDIRECT_REF, cst_uchar_node,
9259 fold_convert (cst_uchar_ptr_node,
9260 arg2)));
9261 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9264 return NULL_TREE;
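/* Editorial illustration, not part of the original source.  When all
   arguments are constant and LEN does not exceed either string,

     memcmp ("abc", "abd", 2)    ->    0

   and for LEN == 1 the call becomes the byte difference

     (int) *(const unsigned char *) arg1 - (int) *(const unsigned char *) arg2  */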
9267 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9268 Return NULL_TREE if no simplification can be made. */
9270 static tree
9271 fold_builtin_strcmp (tree arg1, tree arg2)
9273 const char *p1, *p2;
9275 if (!validate_arg (arg1, POINTER_TYPE)
9276 || !validate_arg (arg2, POINTER_TYPE))
9277 return NULL_TREE;
9279 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9280 if (operand_equal_p (arg1, arg2, 0))
9281 return integer_zero_node;
9283 p1 = c_getstr (arg1);
9284 p2 = c_getstr (arg2);
9286 if (p1 && p2)
9288 const int i = strcmp (p1, p2);
9289 if (i < 0)
9290 return integer_minus_one_node;
9291 else if (i > 0)
9292 return integer_one_node;
9293 else
9294 return integer_zero_node;
9297 /* If the second arg is "", return *(const unsigned char*)arg1. */
9298 if (p2 && *p2 == '\0')
9300 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9301 tree cst_uchar_ptr_node
9302 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9304 return fold_convert (integer_type_node,
9305 build1 (INDIRECT_REF, cst_uchar_node,
9306 fold_convert (cst_uchar_ptr_node,
9307 arg1)));
9310 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9311 if (p1 && *p1 == '\0')
9313 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9314 tree cst_uchar_ptr_node
9315 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9317 tree temp = fold_convert (integer_type_node,
9318 build1 (INDIRECT_REF, cst_uchar_node,
9319 fold_convert (cst_uchar_ptr_node,
9320 arg2)));
9321 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9324 return NULL_TREE;
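/* Editorial illustration, not part of the original source.  "s" is a
   hypothetical char pointer.  When one argument is a constant empty
   string the comparison collapses to a single character load:

     strcmp (s, "")    ->    (int) *(const unsigned char *) s
     strcmp ("", s)    ->    -(int) *(const unsigned char *) s

   Two constant arguments fold directly to -1, 0 or 1.  */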
9327 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9328 Return NULL_TREE if no simplification can be made. */
9330 static tree
9331 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9333 const char *p1, *p2;
9335 if (!validate_arg (arg1, POINTER_TYPE)
9336 || !validate_arg (arg2, POINTER_TYPE)
9337 || !validate_arg (len, INTEGER_TYPE))
9338 return NULL_TREE;
9340 /* If the LEN parameter is zero, return zero. */
9341 if (integer_zerop (len))
9342 return omit_two_operands (integer_type_node, integer_zero_node,
9343 arg1, arg2);
9345 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9346 if (operand_equal_p (arg1, arg2, 0))
9347 return omit_one_operand (integer_type_node, integer_zero_node, len);
9349 p1 = c_getstr (arg1);
9350 p2 = c_getstr (arg2);
9352 if (host_integerp (len, 1) && p1 && p2)
9354 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9355 if (i > 0)
9356 return integer_one_node;
9357 else if (i < 0)
9358 return integer_minus_one_node;
9359 else
9360 return integer_zero_node;
9363 /* If the second arg is "", and the length is greater than zero,
9364 return *(const unsigned char*)arg1. */
9365 if (p2 && *p2 == '\0'
9366 && TREE_CODE (len) == INTEGER_CST
9367 && tree_int_cst_sgn (len) == 1)
9369 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9370 tree cst_uchar_ptr_node
9371 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9373 return fold_convert (integer_type_node,
9374 build1 (INDIRECT_REF, cst_uchar_node,
9375 fold_convert (cst_uchar_ptr_node,
9376 arg1)));
9379 /* If the first arg is "", and the length is greater than zero,
9380 return -*(const unsigned char*)arg2. */
9381 if (p1 && *p1 == '\0'
9382 && TREE_CODE (len) == INTEGER_CST
9383 && tree_int_cst_sgn (len) == 1)
9385 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9386 tree cst_uchar_ptr_node
9387 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9389 tree temp = fold_convert (integer_type_node,
9390 build1 (INDIRECT_REF, cst_uchar_node,
9391 fold_convert (cst_uchar_ptr_node,
9392 arg2)));
9393 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9396 /* If the LEN parameter is one, return an expression corresponding to
9397 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9398 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9400 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9401 tree cst_uchar_ptr_node
9402 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9404 tree ind1 = fold_convert (integer_type_node,
9405 build1 (INDIRECT_REF, cst_uchar_node,
9406 fold_convert (cst_uchar_ptr_node,
9407 arg1)));
9408 tree ind2 = fold_convert (integer_type_node,
9409 build1 (INDIRECT_REF, cst_uchar_node,
9410 fold_convert (cst_uchar_ptr_node,
9411 arg2)));
9412 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9415 return NULL_TREE;
9418 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9419 ARG. Return NULL_TREE if no simplification can be made. */
9421 static tree
9422 fold_builtin_signbit (tree arg, tree type)
9424 tree temp;
9426 if (!validate_arg (arg, REAL_TYPE))
9427 return NULL_TREE;
9429 /* If ARG is a compile-time constant, determine the result. */
9430 if (TREE_CODE (arg) == REAL_CST
9431 && !TREE_OVERFLOW (arg))
9433 REAL_VALUE_TYPE c;
9435 c = TREE_REAL_CST (arg);
9436 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9437 return fold_convert (type, temp);
9440 /* If ARG is non-negative, the result is always zero. */
9441 if (tree_expr_nonnegative_p (arg))
9442 return omit_one_operand (type, integer_zero_node, arg);
9444 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9445 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9446 return fold_build2 (LT_EXPR, type, arg,
9447 build_real (TREE_TYPE (arg), dconst0));
9449 return NULL_TREE;
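/* Editorial illustration, not part of the original source.

     signbit (-3.0)      ->    1        (constant argument)
     signbit (fabs (x))  ->    0        (argument known non-negative)

   and for a format without signed zeros the general case becomes the
   comparison x < 0.0.  */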
9452 /* Fold function call to builtin copysign, copysignf or copysignl with
9453 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9454 be made. */
9456 static tree
9457 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9459 tree tem;
9461 if (!validate_arg (arg1, REAL_TYPE)
9462 || !validate_arg (arg2, REAL_TYPE))
9463 return NULL_TREE;
9465 /* copysign(X,X) is X. */
9466 if (operand_equal_p (arg1, arg2, 0))
9467 return fold_convert (type, arg1);
9469 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9470 if (TREE_CODE (arg1) == REAL_CST
9471 && TREE_CODE (arg2) == REAL_CST
9472 && !TREE_OVERFLOW (arg1)
9473 && !TREE_OVERFLOW (arg2))
9475 REAL_VALUE_TYPE c1, c2;
9477 c1 = TREE_REAL_CST (arg1);
9478 c2 = TREE_REAL_CST (arg2);
9479 /* c1.sign := c2.sign. */
9480 real_copysign (&c1, &c2);
9481 return build_real (type, c1);
9484 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9485 Remember to evaluate Y for side-effects. */
9486 if (tree_expr_nonnegative_p (arg2))
9487 return omit_one_operand (type,
9488 fold_build1 (ABS_EXPR, type, arg1),
9489 arg2);
9491 /* Strip sign changing operations for the first argument. */
9492 tem = fold_strip_sign_ops (arg1);
9493 if (tem)
9494 return build_call_expr (fndecl, 2, tem, arg2);
9496 return NULL_TREE;
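/* Editorial illustration, not part of the original source.

     copysign (x, x)       ->    x
     copysign (-3.0, 2.0)  ->    3.0          (both constants)
     copysign (x, 2.0)     ->    fabs (x)     (sign source non-negative)

   In the last form the second argument is retained via omit_one_operand
   so any side effects it might have are still evaluated.  */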
9499 /* Fold a call to builtin isascii with argument ARG. */
9501 static tree
9502 fold_builtin_isascii (tree arg)
9504 if (!validate_arg (arg, INTEGER_TYPE))
9505 return NULL_TREE;
9506 else
9508 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9509 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9510 build_int_cst (NULL_TREE,
9511 ~ (unsigned HOST_WIDE_INT) 0x7f));
9512 return fold_build2 (EQ_EXPR, integer_type_node,
9513 arg, integer_zero_node);
9517 /* Fold a call to builtin toascii with argument ARG. */
9519 static tree
9520 fold_builtin_toascii (tree arg)
9522 if (!validate_arg (arg, INTEGER_TYPE))
9523 return NULL_TREE;
9525 /* Transform toascii(c) -> (c & 0x7f). */
9526 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9527 build_int_cst (NULL_TREE, 0x7f));
9530 /* Fold a call to builtin isdigit with argument ARG. */
9532 static tree
9533 fold_builtin_isdigit (tree arg)
9535 if (!validate_arg (arg, INTEGER_TYPE))
9536 return NULL_TREE;
9537 else
9539 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9540 /* According to the C standard, isdigit is unaffected by locale.
9541 However, it definitely is affected by the target character set. */
9542 unsigned HOST_WIDE_INT target_digit0
9543 = lang_hooks.to_target_charset ('0');
9545 if (target_digit0 == 0)
9546 return NULL_TREE;
9548 arg = fold_convert (unsigned_type_node, arg);
9549 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9550 build_int_cst (unsigned_type_node, target_digit0));
9551 return fold_build2 (LE_EXPR, integer_type_node, arg,
9552 build_int_cst (unsigned_type_node, 9));
9556 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9558 static tree
9559 fold_builtin_fabs (tree arg, tree type)
9561 if (!validate_arg (arg, REAL_TYPE))
9562 return NULL_TREE;
9564 arg = fold_convert (type, arg);
9565 if (TREE_CODE (arg) == REAL_CST)
9566 return fold_abs_const (arg, type);
9567 return fold_build1 (ABS_EXPR, type, arg);
9570 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9572 static tree
9573 fold_builtin_abs (tree arg, tree type)
9575 if (!validate_arg (arg, INTEGER_TYPE))
9576 return NULL_TREE;
9578 arg = fold_convert (type, arg);
9579 if (TREE_CODE (arg) == INTEGER_CST)
9580 return fold_abs_const (arg, type);
9581 return fold_build1 (ABS_EXPR, type, arg);
9584 /* Fold a call to builtin fmin or fmax. */
9586 static tree
9587 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9589 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9591 /* Calculate the result when the argument is a constant. */
9592 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9594 if (res)
9595 return res;
9597 /* If either argument is NaN, return the other one. Avoid the
9598 transformation if we get (and honor) a signalling NaN. Using
9599 omit_one_operand() ensures we create a non-lvalue. */
9600 if (TREE_CODE (arg0) == REAL_CST
9601 && real_isnan (&TREE_REAL_CST (arg0))
9602 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9603 || ! TREE_REAL_CST (arg0).signalling))
9604 return omit_one_operand (type, arg1, arg0);
9605 if (TREE_CODE (arg1) == REAL_CST
9606 && real_isnan (&TREE_REAL_CST (arg1))
9607 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9608 || ! TREE_REAL_CST (arg1).signalling))
9609 return omit_one_operand (type, arg0, arg1);
9611 /* Transform fmin/fmax(x,x) -> x. */
9612 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9613 return omit_one_operand (type, arg0, arg1);
9615 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9616 functions to return the numeric arg if the other one is NaN.
9617 These tree codes don't honor that, so only transform if
9618 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9619 handled, so we don't have to worry about it either. */
9620 if (flag_finite_math_only)
9621 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9622 fold_convert (type, arg0),
9623 fold_convert (type, arg1));
9625 return NULL_TREE;
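/* Editorial illustration, not part of the original source.  NAN stands
   for a quiet NaN constant.

     fmin (x, NAN)    ->    x        (the non-NaN operand survives)
     fmax (x, x)      ->    x

   With -ffinite-math-only, fmax (a, b) is lowered to MAX_EXPR <a, b>
   (and fmin to MIN_EXPR).  */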
9628 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9630 static tree
9631 fold_builtin_carg (tree arg, tree type)
9633 if (validate_arg (arg, COMPLEX_TYPE))
9635 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9637 if (atan2_fn)
9639 tree new_arg = builtin_save_expr (arg);
9640 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9641 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9642 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9646 return NULL_TREE;
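/* Editorial illustration, not part of the original source.  Provided a
   matching atan2 built-in declaration exists,

     carg (z)    ->    atan2 (__imag__ z, __real__ z)

   with z saved first so it is evaluated only once.  */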
9649 /* Fold a call to builtin logb/ilogb. */
9651 static tree
9652 fold_builtin_logb (tree arg, tree rettype)
9654 if (! validate_arg (arg, REAL_TYPE))
9655 return NULL_TREE;
9657 STRIP_NOPS (arg);
9659 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9661 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9663 switch (value->cl)
9665 case rvc_nan:
9666 case rvc_inf:
9667 /* If arg is Inf or NaN and we're logb, return it. */
9668 if (TREE_CODE (rettype) == REAL_TYPE)
9669 return fold_convert (rettype, arg);
9670 /* Fall through... */
9671 case rvc_zero:
9672 /* Zero may set errno and/or raise an exception for logb; also,
9673 for ilogb we don't know FP_ILOGB0. */
9674 return NULL_TREE;
9675 case rvc_normal:
9676 /* For normal numbers, proceed iff radix == 2. In GCC,
9677 normalized significands are in the range [0.5, 1.0). We
9678 want the exponent as if they were [1.0, 2.0) so get the
9679 exponent and subtract 1. */
9680 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9681 return fold_convert (rettype, build_int_cst (NULL_TREE,
9682 REAL_EXP (value)-1));
9683 break;
9687 return NULL_TREE;
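/* Editorial illustration, not part of the original source.  In a radix-2
   format 8.0 is represented as 0.5 * 2**4, so REAL_EXP is 4 and

     logb (8.0)     ->    3.0        (REAL_EXP - 1)
     ilogb (8.0)    ->    3

   logb (NaN) and logb (Inf) fold back to the argument, while a zero
   argument is left alone because of the errno / FP_ILOGB0 concerns
   noted above.  */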
9690 /* Fold a call to builtin significand, if radix == 2. */
9692 static tree
9693 fold_builtin_significand (tree arg, tree rettype)
9695 if (! validate_arg (arg, REAL_TYPE))
9696 return NULL_TREE;
9698 STRIP_NOPS (arg);
9700 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9702 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9704 switch (value->cl)
9706 case rvc_zero:
9707 case rvc_nan:
9708 case rvc_inf:
9709 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9710 return fold_convert (rettype, arg);
9711 case rvc_normal:
9712 /* For normal numbers, proceed iff radix == 2. */
9713 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9715 REAL_VALUE_TYPE result = *value;
9716 /* In GCC, normalized significands are in the range [0.5,
9717 1.0). We want them to be [1.0, 2.0) so set the
9718 exponent to 1. */
9719 SET_REAL_EXP (&result, 1);
9720 return build_real (rettype, result);
9722 break;
9726 return NULL_TREE;
9729 /* Fold a call to builtin frexp; we can assume the base is 2. */
9731 static tree
9732 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9734 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9735 return NULL_TREE;
9737 STRIP_NOPS (arg0);
9739 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9740 return NULL_TREE;
9742 arg1 = build_fold_indirect_ref (arg1);
9744 /* Proceed if a valid pointer type was passed in. */
9745 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9747 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9748 tree frac, exp;
9750 switch (value->cl)
9752 case rvc_zero:
9753 /* For +-0, return (*exp = 0, +-0). */
9754 exp = integer_zero_node;
9755 frac = arg0;
9756 break;
9757 case rvc_nan:
9758 case rvc_inf:
9759 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9760 return omit_one_operand (rettype, arg0, arg1);
9761 case rvc_normal:
9763 /* Since the frexp function always expects base 2, and in
9764 GCC normalized significands are already in the range
9765 [0.5, 1.0), we have exactly what frexp wants. */
9766 REAL_VALUE_TYPE frac_rvt = *value;
9767 SET_REAL_EXP (&frac_rvt, 0);
9768 frac = build_real (rettype, frac_rvt);
9769 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9771 break;
9772 default:
9773 gcc_unreachable ();
9776 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9777 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9778 TREE_SIDE_EFFECTS (arg1) = 1;
9779 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9782 return NULL_TREE;
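/* Editorial illustration, not part of the original source.  "e" is a
   hypothetical int variable.  Since 8.0 has significand 0.5 and
   exponent 4,

     frexp (8.0, &e)    ->    (*e = 4, 0.5)

   built as a COMPOUND_EXPR of the exponent store and the fractional
   constant.  */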
9785 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9786 then we can assume the base is two. If it's false, then we have to
9787 check the mode of the TYPE parameter in certain cases. */
9789 static tree
9790 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9792 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9794 STRIP_NOPS (arg0);
9795 STRIP_NOPS (arg1);
9797 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9798 if (real_zerop (arg0) || integer_zerop (arg1)
9799 || (TREE_CODE (arg0) == REAL_CST
9800 && !real_isfinite (&TREE_REAL_CST (arg0))))
9801 return omit_one_operand (type, arg0, arg1);
9803 /* If both arguments are constant, then try to evaluate it. */
9804 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9805 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9806 && host_integerp (arg1, 0))
9808 /* Bound the maximum adjustment to twice the range of the
9809 mode's valid exponents. Use abs to ensure the range is
9810 positive as a sanity check. */
9811 const long max_exp_adj = 2 *
9812 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9813 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9815 /* Get the user-requested adjustment. */
9816 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9818 /* The requested adjustment must be inside this range. This
9819 is a preliminary cap to avoid things like overflow; we
9820 may still fail to compute the result for other reasons. */
9821 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9823 REAL_VALUE_TYPE initial_result;
9825 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9827 /* Ensure we didn't overflow. */
9828 if (! real_isinf (&initial_result))
9830 const REAL_VALUE_TYPE trunc_result
9831 = real_value_truncate (TYPE_MODE (type), initial_result);
9833 /* Only proceed if the target mode can hold the
9834 resulting value. */
9835 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9836 return build_real (type, trunc_result);
9842 return NULL_TREE;
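/* Editorial illustration, not part of the original source.

     ldexp (1.5, 3)    ->    12.0        (both arguments constant)
     ldexp (x, 0)      ->    x
     scalbn (0.0, n)   ->    0.0         (n evaluated only for side effects)

   The constant case is only accepted when the truncated result still
   equals the exact result in the target mode.  */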
9845 /* Fold a call to builtin modf. */
9847 static tree
9848 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9850 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9851 return NULL_TREE;
9853 STRIP_NOPS (arg0);
9855 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9856 return NULL_TREE;
9858 arg1 = build_fold_indirect_ref (arg1);
9860 /* Proceed if a valid pointer type was passed in. */
9861 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9863 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9864 REAL_VALUE_TYPE trunc, frac;
9866 switch (value->cl)
9868 case rvc_nan:
9869 case rvc_zero:
9870 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9871 trunc = frac = *value;
9872 break;
9873 case rvc_inf:
9874 /* For +-Inf, return (*arg1 = arg0, +-0). */
9875 frac = dconst0;
9876 frac.sign = value->sign;
9877 trunc = *value;
9878 break;
9879 case rvc_normal:
9880 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9881 real_trunc (&trunc, VOIDmode, value);
9882 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9883 /* If the original number was negative and already
9884 integral, then the fractional part is -0.0. */
9885 if (value->sign && frac.cl == rvc_zero)
9886 frac.sign = value->sign;
9887 break;
9890 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9891 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9892 build_real (rettype, trunc));
9893 TREE_SIDE_EFFECTS (arg1) = 1;
9894 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9895 build_real (rettype, frac));
9898 return NULL_TREE;
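/* Editorial illustration, not part of the original source.  "ip" is a
   hypothetical double variable.

     modf (2.5, &ip)     ->    (*ip = 2.0, 0.5)
     modf (-3.0, &ip)    ->    (*ip = -3.0, -0.0)

   The second case shows the -0.0 fractional part produced for a
   negative, already-integral argument.  */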
9901 /* Fold a call to __builtin_isnan (), __builtin_isinf (), __builtin_isinf_sign () or __builtin_finite ().
9902 ARG is the argument for the call. */
9904 static tree
9905 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9907 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9908 REAL_VALUE_TYPE r;
9910 if (!validate_arg (arg, REAL_TYPE))
9911 return NULL_TREE;
9913 switch (builtin_index)
9915 case BUILT_IN_ISINF:
9916 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9917 return omit_one_operand (type, integer_zero_node, arg);
9919 if (TREE_CODE (arg) == REAL_CST)
9921 r = TREE_REAL_CST (arg);
9922 if (real_isinf (&r))
9923 return real_compare (GT_EXPR, &r, &dconst0)
9924 ? integer_one_node : integer_minus_one_node;
9925 else
9926 return integer_zero_node;
9929 return NULL_TREE;
9931 case BUILT_IN_ISINF_SIGN:
9933 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9934 /* In a boolean context, GCC will fold the inner COND_EXPR to
9935 1. So e.g. "if (isinf_sign(x))" would be folded to just
9936 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9937 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9938 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9939 tree tmp = NULL_TREE;
9941 arg = builtin_save_expr (arg);
9943 if (signbit_fn && isinf_fn)
9945 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9946 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9948 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9949 signbit_call, integer_zero_node);
9950 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9951 isinf_call, integer_zero_node);
9953 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9954 integer_minus_one_node, integer_one_node);
9955 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9956 integer_zero_node);
9959 return tmp;
9962 case BUILT_IN_ISFINITE:
9963 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9964 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9965 return omit_one_operand (type, integer_one_node, arg);
9967 if (TREE_CODE (arg) == REAL_CST)
9969 r = TREE_REAL_CST (arg);
9970 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9973 return NULL_TREE;
9975 case BUILT_IN_ISNAN:
9976 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9977 return omit_one_operand (type, integer_zero_node, arg);
9979 if (TREE_CODE (arg) == REAL_CST)
9981 r = TREE_REAL_CST (arg);
9982 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9985 arg = builtin_save_expr (arg);
9986 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9988 default:
9989 gcc_unreachable ();
9993 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9994 This builtin will generate code to return the appropriate floating
9995 point classification depending on the value of the floating point
9996 number passed in. The possible return values must be supplied as
9997 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9998 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9999 one floating point argument which is "type generic". */
10001 static tree
10002 fold_builtin_fpclassify (tree exp)
10004 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10005 arg, type, res, tmp;
10006 enum machine_mode mode;
10007 REAL_VALUE_TYPE r;
10008 char buf[128];
10010 /* Verify the required arguments in the original call. */
10011 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10012 INTEGER_TYPE, INTEGER_TYPE,
10013 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10014 return NULL_TREE;
10016 fp_nan = CALL_EXPR_ARG (exp, 0);
10017 fp_infinite = CALL_EXPR_ARG (exp, 1);
10018 fp_normal = CALL_EXPR_ARG (exp, 2);
10019 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10020 fp_zero = CALL_EXPR_ARG (exp, 4);
10021 arg = CALL_EXPR_ARG (exp, 5);
10022 type = TREE_TYPE (arg);
10023 mode = TYPE_MODE (type);
10024 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
10026 /* fpclassify(x) ->
10027 isnan(x) ? FP_NAN :
10028 (fabs(x) == Inf ? FP_INFINITE :
10029 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10030 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10032 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10033 build_real (type, dconst0));
10034 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
10036 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10037 real_from_string (&r, buf);
10038 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
10039 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
10041 if (HONOR_INFINITIES (mode))
10043 real_inf (&r);
10044 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10045 build_real (type, r));
10046 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
10049 if (HONOR_NANS (mode))
10051 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
10052 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
10055 return res;
10058 /* Fold a call to an unordered comparison function such as
10059 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10060 being called and ARG0 and ARG1 are the arguments for the call.
10061 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10062 the opposite of the desired result. UNORDERED_CODE is used
10063 for modes that can hold NaNs and ORDERED_CODE is used for
10064 the rest. */
10066 static tree
10067 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
10068 enum tree_code unordered_code,
10069 enum tree_code ordered_code)
10071 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10072 enum tree_code code;
10073 tree type0, type1;
10074 enum tree_code code0, code1;
10075 tree cmp_type = NULL_TREE;
10077 type0 = TREE_TYPE (arg0);
10078 type1 = TREE_TYPE (arg1);
10080 code0 = TREE_CODE (type0);
10081 code1 = TREE_CODE (type1);
10083 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10084 /* Choose the wider of two real types. */
10085 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10086 ? type0 : type1;
10087 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10088 cmp_type = type0;
10089 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10090 cmp_type = type1;
10092 arg0 = fold_convert (cmp_type, arg0);
10093 arg1 = fold_convert (cmp_type, arg1);
10095 if (unordered_code == UNORDERED_EXPR)
10097 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10098 return omit_two_operands (type, integer_zero_node, arg0, arg1);
10099 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
10102 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10103 : ordered_code;
10104 return fold_build1 (TRUTH_NOT_EXPR, type,
10105 fold_build2 (code, type, arg0, arg1));
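/* Editorial illustration, not part of the original source.  When the
   operands' mode honors NaNs,

     isgreater (x, y)    ->    !(UNLE_EXPR <x, y>)

   i.e. the negation of the inverted, unordered-aware comparison; when
   the mode cannot hold NaNs this degenerates to !(x <= y).  */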
10108 /* Fold a call to built-in function FNDECL with 0 arguments.
10109 IGNORE is true if the result of the function call is ignored. This
10110 function returns NULL_TREE if no simplification was possible. */
10112 static tree
10113 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10115 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10116 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10117 switch (fcode)
10119 CASE_FLT_FN (BUILT_IN_INF):
10120 case BUILT_IN_INFD32:
10121 case BUILT_IN_INFD64:
10122 case BUILT_IN_INFD128:
10123 return fold_builtin_inf (type, true);
10125 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10126 return fold_builtin_inf (type, false);
10128 case BUILT_IN_CLASSIFY_TYPE:
10129 return fold_builtin_classify_type (NULL_TREE);
10131 default:
10132 break;
10134 return NULL_TREE;
10137 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10138 IGNORE is true if the result of the function call is ignored. This
10139 function returns NULL_TREE if no simplification was possible. */
10141 static tree
10142 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
10144 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10145 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10146 switch (fcode)
10149 case BUILT_IN_CONSTANT_P:
10151 tree val = fold_builtin_constant_p (arg0);
10153 /* Gimplification will pull the CALL_EXPR for the builtin out of
10154 an if condition. When not optimizing, we'll not CSE it back.
10155 To avoid regressions such as link errors, return false now. */
10156 if (!val && !optimize)
10157 val = integer_zero_node;
10159 return val;
10162 case BUILT_IN_CLASSIFY_TYPE:
10163 return fold_builtin_classify_type (arg0);
10165 case BUILT_IN_STRLEN:
10166 return fold_builtin_strlen (arg0);
10168 CASE_FLT_FN (BUILT_IN_FABS):
10169 return fold_builtin_fabs (arg0, type);
10171 case BUILT_IN_ABS:
10172 case BUILT_IN_LABS:
10173 case BUILT_IN_LLABS:
10174 case BUILT_IN_IMAXABS:
10175 return fold_builtin_abs (arg0, type);
10177 CASE_FLT_FN (BUILT_IN_CONJ):
10178 if (validate_arg (arg0, COMPLEX_TYPE))
10179 return fold_build1 (CONJ_EXPR, type, arg0);
10180 break;
10182 CASE_FLT_FN (BUILT_IN_CREAL):
10183 if (validate_arg (arg0, COMPLEX_TYPE))
10184 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
10185 break;
10187 CASE_FLT_FN (BUILT_IN_CIMAG):
10188 if (validate_arg (arg0, COMPLEX_TYPE))
10189 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
10190 break;
10192 CASE_FLT_FN (BUILT_IN_CCOS):
10193 CASE_FLT_FN (BUILT_IN_CCOSH):
10194 /* These functions are "even", i.e. f(x) == f(-x). */
10195 if (validate_arg (arg0, COMPLEX_TYPE))
10197 tree narg = fold_strip_sign_ops (arg0);
10198 if (narg)
10199 return build_call_expr (fndecl, 1, narg);
10201 break;
10203 CASE_FLT_FN (BUILT_IN_CABS):
10204 return fold_builtin_cabs (arg0, type, fndecl);
10206 CASE_FLT_FN (BUILT_IN_CARG):
10207 return fold_builtin_carg (arg0, type);
10209 CASE_FLT_FN (BUILT_IN_SQRT):
10210 return fold_builtin_sqrt (arg0, type);
10212 CASE_FLT_FN (BUILT_IN_CBRT):
10213 return fold_builtin_cbrt (arg0, type);
10215 CASE_FLT_FN (BUILT_IN_ASIN):
10216 if (validate_arg (arg0, REAL_TYPE))
10217 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10218 &dconstm1, &dconst1, true);
10219 break;
10221 CASE_FLT_FN (BUILT_IN_ACOS):
10222 if (validate_arg (arg0, REAL_TYPE))
10223 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10224 &dconstm1, &dconst1, true);
10225 break;
10227 CASE_FLT_FN (BUILT_IN_ATAN):
10228 if (validate_arg (arg0, REAL_TYPE))
10229 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10230 break;
10232 CASE_FLT_FN (BUILT_IN_ASINH):
10233 if (validate_arg (arg0, REAL_TYPE))
10234 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10235 break;
10237 CASE_FLT_FN (BUILT_IN_ACOSH):
10238 if (validate_arg (arg0, REAL_TYPE))
10239 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10240 &dconst1, NULL, true);
10241 break;
10243 CASE_FLT_FN (BUILT_IN_ATANH):
10244 if (validate_arg (arg0, REAL_TYPE))
10245 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10246 &dconstm1, &dconst1, false);
10247 break;
10249 CASE_FLT_FN (BUILT_IN_SIN):
10250 if (validate_arg (arg0, REAL_TYPE))
10251 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10252 break;
10254 CASE_FLT_FN (BUILT_IN_COS):
10255 return fold_builtin_cos (arg0, type, fndecl);
10256 break;
10258 CASE_FLT_FN (BUILT_IN_TAN):
10259 return fold_builtin_tan (arg0, type);
10261 CASE_FLT_FN (BUILT_IN_CEXP):
10262 return fold_builtin_cexp (arg0, type);
10264 CASE_FLT_FN (BUILT_IN_CEXPI):
10265 if (validate_arg (arg0, REAL_TYPE))
10266 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10267 break;
10269 CASE_FLT_FN (BUILT_IN_SINH):
10270 if (validate_arg (arg0, REAL_TYPE))
10271 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10272 break;
10274 CASE_FLT_FN (BUILT_IN_COSH):
10275 return fold_builtin_cosh (arg0, type, fndecl);
10277 CASE_FLT_FN (BUILT_IN_TANH):
10278 if (validate_arg (arg0, REAL_TYPE))
10279 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10280 break;
10282 CASE_FLT_FN (BUILT_IN_ERF):
10283 if (validate_arg (arg0, REAL_TYPE))
10284 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10285 break;
10287 CASE_FLT_FN (BUILT_IN_ERFC):
10288 if (validate_arg (arg0, REAL_TYPE))
10289 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10290 break;
10292 CASE_FLT_FN (BUILT_IN_TGAMMA):
10293 if (validate_arg (arg0, REAL_TYPE))
10294 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10295 break;
10297 CASE_FLT_FN (BUILT_IN_EXP):
10298 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10300 CASE_FLT_FN (BUILT_IN_EXP2):
10301 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10303 CASE_FLT_FN (BUILT_IN_EXP10):
10304 CASE_FLT_FN (BUILT_IN_POW10):
10305 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10307 CASE_FLT_FN (BUILT_IN_EXPM1):
10308 if (validate_arg (arg0, REAL_TYPE))
10309 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10310 break;
10312 CASE_FLT_FN (BUILT_IN_LOG):
10313 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10315 CASE_FLT_FN (BUILT_IN_LOG2):
10316 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10318 CASE_FLT_FN (BUILT_IN_LOG10):
10319 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10321 CASE_FLT_FN (BUILT_IN_LOG1P):
10322 if (validate_arg (arg0, REAL_TYPE))
10323 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10324 &dconstm1, NULL, false);
10325 break;
10327 CASE_FLT_FN (BUILT_IN_J0):
10328 if (validate_arg (arg0, REAL_TYPE))
10329 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10330 NULL, NULL, 0);
10331 break;
10333 CASE_FLT_FN (BUILT_IN_J1):
10334 if (validate_arg (arg0, REAL_TYPE))
10335 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10336 NULL, NULL, 0);
10337 break;
10339 CASE_FLT_FN (BUILT_IN_Y0):
10340 if (validate_arg (arg0, REAL_TYPE))
10341 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10342 &dconst0, NULL, false);
10343 break;
10345 CASE_FLT_FN (BUILT_IN_Y1):
10346 if (validate_arg (arg0, REAL_TYPE))
10347 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10348 &dconst0, NULL, false);
10349 break;
10351 CASE_FLT_FN (BUILT_IN_NAN):
10352 case BUILT_IN_NAND32:
10353 case BUILT_IN_NAND64:
10354 case BUILT_IN_NAND128:
10355 return fold_builtin_nan (arg0, type, true);
10357 CASE_FLT_FN (BUILT_IN_NANS):
10358 return fold_builtin_nan (arg0, type, false);
10360 CASE_FLT_FN (BUILT_IN_FLOOR):
10361 return fold_builtin_floor (fndecl, arg0);
10363 CASE_FLT_FN (BUILT_IN_CEIL):
10364 return fold_builtin_ceil (fndecl, arg0);
10366 CASE_FLT_FN (BUILT_IN_TRUNC):
10367 return fold_builtin_trunc (fndecl, arg0);
10369 CASE_FLT_FN (BUILT_IN_ROUND):
10370 return fold_builtin_round (fndecl, arg0);
10372 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10373 CASE_FLT_FN (BUILT_IN_RINT):
10374 return fold_trunc_transparent_mathfn (fndecl, arg0);
10376 CASE_FLT_FN (BUILT_IN_LCEIL):
10377 CASE_FLT_FN (BUILT_IN_LLCEIL):
10378 CASE_FLT_FN (BUILT_IN_LFLOOR):
10379 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10380 CASE_FLT_FN (BUILT_IN_LROUND):
10381 CASE_FLT_FN (BUILT_IN_LLROUND):
10382 return fold_builtin_int_roundingfn (fndecl, arg0);
10384 CASE_FLT_FN (BUILT_IN_LRINT):
10385 CASE_FLT_FN (BUILT_IN_LLRINT):
10386 return fold_fixed_mathfn (fndecl, arg0);
10388 case BUILT_IN_BSWAP32:
10389 case BUILT_IN_BSWAP64:
10390 return fold_builtin_bswap (fndecl, arg0);
10392 CASE_INT_FN (BUILT_IN_FFS):
10393 CASE_INT_FN (BUILT_IN_CLZ):
10394 CASE_INT_FN (BUILT_IN_CTZ):
10395 CASE_INT_FN (BUILT_IN_POPCOUNT):
10396 CASE_INT_FN (BUILT_IN_PARITY):
10397 return fold_builtin_bitop (fndecl, arg0);
10399 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10400 return fold_builtin_signbit (arg0, type);
10402 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10403 return fold_builtin_significand (arg0, type);
10405 CASE_FLT_FN (BUILT_IN_ILOGB):
10406 CASE_FLT_FN (BUILT_IN_LOGB):
10407 return fold_builtin_logb (arg0, type);
10409 case BUILT_IN_ISASCII:
10410 return fold_builtin_isascii (arg0);
10412 case BUILT_IN_TOASCII:
10413 return fold_builtin_toascii (arg0);
10415 case BUILT_IN_ISDIGIT:
10416 return fold_builtin_isdigit (arg0);
10418 CASE_FLT_FN (BUILT_IN_FINITE):
10419 case BUILT_IN_FINITED32:
10420 case BUILT_IN_FINITED64:
10421 case BUILT_IN_FINITED128:
10422 case BUILT_IN_ISFINITE:
10423 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10425 CASE_FLT_FN (BUILT_IN_ISINF):
10426 case BUILT_IN_ISINFD32:
10427 case BUILT_IN_ISINFD64:
10428 case BUILT_IN_ISINFD128:
10429 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10431 case BUILT_IN_ISINF_SIGN:
10432 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10434 CASE_FLT_FN (BUILT_IN_ISNAN):
10435 case BUILT_IN_ISNAND32:
10436 case BUILT_IN_ISNAND64:
10437 case BUILT_IN_ISNAND128:
10438 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10440 case BUILT_IN_PRINTF:
10441 case BUILT_IN_PRINTF_UNLOCKED:
10442 case BUILT_IN_VPRINTF:
10443 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10445 default:
10446 break;
10449 return NULL_TREE;
10453 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10454 IGNORE is true if the result of the function call is ignored. This
10455 function returns NULL_TREE if no simplification was possible. */
10457 static tree
10458 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10460 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10461 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10463 switch (fcode)
10465 CASE_FLT_FN (BUILT_IN_JN):
10466 if (validate_arg (arg0, INTEGER_TYPE)
10467 && validate_arg (arg1, REAL_TYPE))
10468 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10469 break;
10471 CASE_FLT_FN (BUILT_IN_YN):
10472 if (validate_arg (arg0, INTEGER_TYPE)
10473 && validate_arg (arg1, REAL_TYPE))
10474 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10475 &dconst0, false);
10476 break;
10478 CASE_FLT_FN (BUILT_IN_DREM):
10479 CASE_FLT_FN (BUILT_IN_REMAINDER):
10480 if (validate_arg (arg0, REAL_TYPE)
10481 && validate_arg (arg1, REAL_TYPE))
10482 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10483 break;
10485 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10486 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10487 if (validate_arg (arg0, REAL_TYPE)
10488 && validate_arg (arg1, POINTER_TYPE))
10489 return do_mpfr_lgamma_r (arg0, arg1, type);
10490 break;
10492 CASE_FLT_FN (BUILT_IN_ATAN2):
10493 if (validate_arg (arg0, REAL_TYPE)
10494 && validate_arg (arg1, REAL_TYPE))
10495 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10496 break;
10498 CASE_FLT_FN (BUILT_IN_FDIM):
10499 if (validate_arg (arg0, REAL_TYPE)
10500 && validate_arg (arg1, REAL_TYPE))
10501 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10502 break;
10504 CASE_FLT_FN (BUILT_IN_HYPOT):
10505 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10507 CASE_FLT_FN (BUILT_IN_LDEXP):
10508 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10509 CASE_FLT_FN (BUILT_IN_SCALBN):
10510 CASE_FLT_FN (BUILT_IN_SCALBLN):
10511 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10513 CASE_FLT_FN (BUILT_IN_FREXP):
10514 return fold_builtin_frexp (arg0, arg1, type);
10516 CASE_FLT_FN (BUILT_IN_MODF):
10517 return fold_builtin_modf (arg0, arg1, type);
10519 case BUILT_IN_BZERO:
10520 return fold_builtin_bzero (arg0, arg1, ignore);
10522 case BUILT_IN_FPUTS:
10523 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10525 case BUILT_IN_FPUTS_UNLOCKED:
10526 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10528 case BUILT_IN_STRSTR:
10529 return fold_builtin_strstr (arg0, arg1, type);
10531 case BUILT_IN_STRCAT:
10532 return fold_builtin_strcat (arg0, arg1);
10534 case BUILT_IN_STRSPN:
10535 return fold_builtin_strspn (arg0, arg1);
10537 case BUILT_IN_STRCSPN:
10538 return fold_builtin_strcspn (arg0, arg1);
10540 case BUILT_IN_STRCHR:
10541 case BUILT_IN_INDEX:
10542 return fold_builtin_strchr (arg0, arg1, type);
10544 case BUILT_IN_STRRCHR:
10545 case BUILT_IN_RINDEX:
10546 return fold_builtin_strrchr (arg0, arg1, type);
10548 case BUILT_IN_STRCPY:
10549 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10551 case BUILT_IN_STPCPY:
10552 if (ignore)
10554 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10555 if (!fn)
10556 break;
10558 return build_call_expr (fn, 2, arg0, arg1);
10560 break;
10562 case BUILT_IN_STRCMP:
10563 return fold_builtin_strcmp (arg0, arg1);
10565 case BUILT_IN_STRPBRK:
10566 return fold_builtin_strpbrk (arg0, arg1, type);
10568 case BUILT_IN_EXPECT:
10569 return fold_builtin_expect (arg0, arg1);
10571 CASE_FLT_FN (BUILT_IN_POW):
10572 return fold_builtin_pow (fndecl, arg0, arg1, type);
10574 CASE_FLT_FN (BUILT_IN_POWI):
10575 return fold_builtin_powi (fndecl, arg0, arg1, type);
10577 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10578 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10580 CASE_FLT_FN (BUILT_IN_FMIN):
10581 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10583 CASE_FLT_FN (BUILT_IN_FMAX):
10584 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10586 case BUILT_IN_ISGREATER:
10587 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10588 case BUILT_IN_ISGREATEREQUAL:
10589 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10590 case BUILT_IN_ISLESS:
10591 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10592 case BUILT_IN_ISLESSEQUAL:
10593 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10594 case BUILT_IN_ISLESSGREATER:
10595 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10596 case BUILT_IN_ISUNORDERED:
10597 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10598 NOP_EXPR);
10600 /* We do the folding for va_start in the expander. */
10601 case BUILT_IN_VA_START:
10602 break;
10604 case BUILT_IN_SPRINTF:
10605 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10607 case BUILT_IN_OBJECT_SIZE:
10608 return fold_builtin_object_size (arg0, arg1);
10610 case BUILT_IN_PRINTF:
10611 case BUILT_IN_PRINTF_UNLOCKED:
10612 case BUILT_IN_VPRINTF:
10613 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10615 case BUILT_IN_PRINTF_CHK:
10616 case BUILT_IN_VPRINTF_CHK:
10617 if (!validate_arg (arg0, INTEGER_TYPE)
10618 || TREE_SIDE_EFFECTS (arg0))
10619 return NULL_TREE;
10620 else
10621 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10622 break;
10624 case BUILT_IN_FPRINTF:
10625 case BUILT_IN_FPRINTF_UNLOCKED:
10626 case BUILT_IN_VFPRINTF:
10627 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10628 ignore, fcode);
10630 default:
10631 break;
10633 return NULL_TREE;
10636 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10637 and ARG2. IGNORE is true if the result of the function call is ignored.
10638 This function returns NULL_TREE if no simplification was possible. */
10640 static tree
10641 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10643 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10644 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10645 switch (fcode)
10648 CASE_FLT_FN (BUILT_IN_SINCOS):
10649 return fold_builtin_sincos (arg0, arg1, arg2);
10651 CASE_FLT_FN (BUILT_IN_FMA):
10652 if (validate_arg (arg0, REAL_TYPE)
10653 && validate_arg (arg1, REAL_TYPE)
10654 && validate_arg (arg2, REAL_TYPE))
10655 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10656 break;
10658 CASE_FLT_FN (BUILT_IN_REMQUO):
10659 if (validate_arg (arg0, REAL_TYPE)
10660 && validate_arg (arg1, REAL_TYPE)
10661 && validate_arg (arg2, POINTER_TYPE))
10662 return do_mpfr_remquo (arg0, arg1, arg2);
10663 break;
10665 case BUILT_IN_MEMSET:
10666 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10668 case BUILT_IN_BCOPY:
10669 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10671 case BUILT_IN_MEMCPY:
10672 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10674 case BUILT_IN_MEMPCPY:
10675 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10677 case BUILT_IN_MEMMOVE:
10678 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10680 case BUILT_IN_STRNCAT:
10681 return fold_builtin_strncat (arg0, arg1, arg2);
10683 case BUILT_IN_STRNCPY:
10684 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10686 case BUILT_IN_STRNCMP:
10687 return fold_builtin_strncmp (arg0, arg1, arg2);
10689 case BUILT_IN_MEMCHR:
10690 return fold_builtin_memchr (arg0, arg1, arg2, type);
10692 case BUILT_IN_BCMP:
10693 case BUILT_IN_MEMCMP:
10694 return fold_builtin_memcmp (arg0, arg1, arg2);
10696 case BUILT_IN_SPRINTF:
10697 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10699 case BUILT_IN_STRCPY_CHK:
10700 case BUILT_IN_STPCPY_CHK:
10701 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10702 ignore, fcode);
10704 case BUILT_IN_STRCAT_CHK:
10705 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10707 case BUILT_IN_PRINTF_CHK:
10708 case BUILT_IN_VPRINTF_CHK:
10709 if (!validate_arg (arg0, INTEGER_TYPE)
10710 || TREE_SIDE_EFFECTS (arg0))
10711 return NULL_TREE;
10712 else
10713 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10714 break;
10716 case BUILT_IN_FPRINTF:
10717 case BUILT_IN_FPRINTF_UNLOCKED:
10718 case BUILT_IN_VFPRINTF:
10719 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10721 case BUILT_IN_FPRINTF_CHK:
10722 case BUILT_IN_VFPRINTF_CHK:
10723 if (!validate_arg (arg1, INTEGER_TYPE)
10724 || TREE_SIDE_EFFECTS (arg1))
10725 return NULL_TREE;
10726 else
10727 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10728 ignore, fcode);
10730 default:
10731 break;
10733 return NULL_TREE;
10736 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10737 ARG2, and ARG3. IGNORE is true if the result of the function call is
10738 ignored. This function returns NULL_TREE if no simplification was
10739 possible. */
10741 static tree
10742 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10743 bool ignore)
10745 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10747 switch (fcode)
10749 case BUILT_IN_MEMCPY_CHK:
10750 case BUILT_IN_MEMPCPY_CHK:
10751 case BUILT_IN_MEMMOVE_CHK:
10752 case BUILT_IN_MEMSET_CHK:
10753 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10754 NULL_TREE, ignore,
10755 DECL_FUNCTION_CODE (fndecl));
10757 case BUILT_IN_STRNCPY_CHK:
10758 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10760 case BUILT_IN_STRNCAT_CHK:
10761 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10763 case BUILT_IN_FPRINTF_CHK:
10764 case BUILT_IN_VFPRINTF_CHK:
10765 if (!validate_arg (arg1, INTEGER_TYPE)
10766 || TREE_SIDE_EFFECTS (arg1))
10767 return NULL_TREE;
10768 else
10769 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10770 ignore, fcode);
10771 break;
10773 default:
10774 break;
10776 return NULL_TREE;
10779 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10780 arguments, where NARGS <= 4. IGNORE is true if the result of the
10781 function call is ignored. This function returns NULL_TREE if no
10782 simplification was possible. Note that this only folds builtins with
10783 fixed argument patterns. Foldings that do varargs-to-varargs
10784 transformations, or that match calls with more than 4 arguments,
10785 need to be handled with fold_builtin_varargs instead. */
10787 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10789 static tree
10790 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10792 tree ret = NULL_TREE;
10794 switch (nargs)
10796 case 0:
10797 ret = fold_builtin_0 (fndecl, ignore);
10798 break;
10799 case 1:
10800 ret = fold_builtin_1 (fndecl, args[0], ignore);
10801 break;
10802 case 2:
10803 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10804 break;
10805 case 3:
10806 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10807 break;
10808 case 4:
10809 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10810 ignore);
10811 break;
10812 default:
10813 break;
10815 if (ret)
10817 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10818 TREE_NO_WARNING (ret) = 1;
10819 return ret;
10821 return NULL_TREE;
10824 /* Builtins with folding operations that operate on "..." arguments
10825 need special handling; we need to store the arguments in a convenient
10826 data structure before attempting any folding. Fortunately there are
10827 only a few builtins that fall into this category. FNDECL is the
10828 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10829 result of the function call is ignored. */
10831 static tree
10832 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10834 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10835 tree ret = NULL_TREE;
10837 switch (fcode)
10839 case BUILT_IN_SPRINTF_CHK:
10840 case BUILT_IN_VSPRINTF_CHK:
10841 ret = fold_builtin_sprintf_chk (exp, fcode);
10842 break;
10844 case BUILT_IN_SNPRINTF_CHK:
10845 case BUILT_IN_VSNPRINTF_CHK:
10846 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10847 break;
10849 case BUILT_IN_FPCLASSIFY:
10850 ret = fold_builtin_fpclassify (exp);
10851 break;
10853 default:
10854 break;
10856 if (ret)
10858 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10859 TREE_NO_WARNING (ret) = 1;
10860 return ret;
10862 return NULL_TREE;
10865 /* Return true if FNDECL shouldn't be folded right now.
10866 If a built-in function has an inline attribute always_inline
10867 wrapper, defer folding it until after always_inline functions have
10868 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10869 might not be performed. */
10871 static bool
10872 avoid_folding_inline_builtin (tree fndecl)
10874 return (DECL_DECLARED_INLINE_P (fndecl)
10875 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10876 && cfun
10877 && !cfun->always_inline_functions_inlined
10878 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10881 /* A wrapper function for builtin folding that prevents warnings for
10882 "statement without effect" and the like, caused by removing the
10883 call node earlier than the warning is generated. */
10885 tree
10886 fold_call_expr (tree exp, bool ignore)
10888 tree ret = NULL_TREE;
10889 tree fndecl = get_callee_fndecl (exp);
10890 if (fndecl
10891 && TREE_CODE (fndecl) == FUNCTION_DECL
10892 && DECL_BUILT_IN (fndecl)
10893 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10894 yet. Defer folding until we see all the arguments
10895 (after inlining). */
10896 && !CALL_EXPR_VA_ARG_PACK (exp))
10898 int nargs = call_expr_nargs (exp);
10900 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10901 instead the last argument is __builtin_va_arg_pack (). Defer folding
10902 even in that case, until arguments are finalized. */
10903 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10905 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10906 if (fndecl2
10907 && TREE_CODE (fndecl2) == FUNCTION_DECL
10908 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10909 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10910 return NULL_TREE;
10913 if (avoid_folding_inline_builtin (fndecl))
10914 return NULL_TREE;
10916 /* FIXME: Don't use a list in this interface. */
10917 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10918 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10919 else
10921 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10923 tree *args = CALL_EXPR_ARGP (exp);
10924 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10926 if (!ret)
10927 ret = fold_builtin_varargs (fndecl, exp, ignore);
10928 if (ret)
10930 /* Propagate location information from original call to
10931 expansion of builtin. Otherwise things like
10932 maybe_emit_chk_warning, which operate on the expansion
10933 of a builtin, will use the wrong location information. */
10934 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10936 tree realret = ret;
10937 if (TREE_CODE (ret) == NOP_EXPR)
10938 realret = TREE_OPERAND (ret, 0);
10939 if (CAN_HAVE_LOCATION_P (realret)
10940 && !EXPR_HAS_LOCATION (realret))
10941 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10943 return ret;
10947 return NULL_TREE;
10950 /* Conveniently construct a function call expression. FNDECL names the
10951 function to be called and ARGLIST is a TREE_LIST of arguments. */
10953 tree
10954 build_function_call_expr (tree fndecl, tree arglist)
10956 tree fntype = TREE_TYPE (fndecl);
10957 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10958 int n = list_length (arglist);
10959 tree *argarray = (tree *) alloca (n * sizeof (tree));
10960 int i;
10962 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10963 argarray[i] = TREE_VALUE (arglist);
10964 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10967 /* Conveniently construct a function call expression. FNDECL names the
10968 function to be called, N is the number of arguments, and the "..."
10969 parameters are the argument expressions. */
10971 tree
10972 build_call_expr (tree fndecl, int n, ...)
10974 va_list ap;
10975 tree fntype = TREE_TYPE (fndecl);
10976 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10977 tree *argarray = (tree *) alloca (n * sizeof (tree));
10978 int i;
10980 va_start (ap, n);
10981 for (i = 0; i < n; i++)
10982 argarray[i] = va_arg (ap, tree);
10983 va_end (ap);
10984 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10987 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10988 N arguments are passed in the array ARGARRAY. */
10990 tree
10991 fold_builtin_call_array (tree type,
10992 tree fn,
10993 int n,
10994 tree *argarray)
10996 tree ret = NULL_TREE;
10997 int i;
10998 tree exp;
11000 if (TREE_CODE (fn) == ADDR_EXPR)
11002 tree fndecl = TREE_OPERAND (fn, 0);
11003 if (TREE_CODE (fndecl) == FUNCTION_DECL
11004 && DECL_BUILT_IN (fndecl))
11006 /* If last argument is __builtin_va_arg_pack (), arguments to this
11007 function are not finalized yet. Defer folding until they are. */
11008 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11010 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11011 if (fndecl2
11012 && TREE_CODE (fndecl2) == FUNCTION_DECL
11013 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11014 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11015 return build_call_array (type, fn, n, argarray);
11017 if (avoid_folding_inline_builtin (fndecl))
11018 return build_call_array (type, fn, n, argarray);
11019 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11021 tree arglist = NULL_TREE;
11022 for (i = n - 1; i >= 0; i--)
11023 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
11024 ret = targetm.fold_builtin (fndecl, arglist, false);
11025 if (ret)
11026 return ret;
11027 return build_call_array (type, fn, n, argarray);
11029 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11031 /* First try the transformations that don't require consing up
11032 an exp. */
11033 ret = fold_builtin_n (fndecl, argarray, n, false);
11034 if (ret)
11035 return ret;
11038 /* If we got this far, we need to build an exp. */
11039 exp = build_call_array (type, fn, n, argarray);
11040 ret = fold_builtin_varargs (fndecl, exp, false);
11041 return ret ? ret : exp;
11045 return build_call_array (type, fn, n, argarray);
11048 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11049 along with N new arguments specified as the "..." parameters. SKIP
11050 is the number of arguments in EXP to be omitted. This function is used
11051 to do varargs-to-varargs transformations. */
11053 static tree
11054 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
11056 int oldnargs = call_expr_nargs (exp);
11057 int nargs = oldnargs - skip + n;
11058 tree fntype = TREE_TYPE (fndecl);
11059 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11060 tree *buffer;
11062 if (n > 0)
11064 int i, j;
11065 va_list ap;
11067 buffer = XALLOCAVEC (tree, nargs);
11068 va_start (ap, n);
11069 for (i = 0; i < n; i++)
11070 buffer[i] = va_arg (ap, tree);
11071 va_end (ap);
11072 for (j = skip; j < oldnargs; j++, i++)
11073 buffer[i] = CALL_EXPR_ARG (exp, j);
11075 else
11076 buffer = CALL_EXPR_ARGP (exp) + skip;
11078 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
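/* Illustrative sketch (editorial addition, hypothetical wrapper name):
   rewrite_call_expr keeps the tail of EXP's argument list and prepends the
   freshly supplied arguments, which is how the *_chk folders below drop
   their flag and size operands.  Patterned on fold_builtin_sprintf_chk
   further down in this file.  */
#if 0
static tree
example_rewrite_sprintf_chk (tree exp, tree dest, tree fmt)
{
  /* For EXP == __sprintf_chk (dest, flag, size, fmt, ...), skip the first
     four arguments and rebuild the call as sprintf (dest, fmt, ...).  */
  return rewrite_call_expr (exp, 4, built_in_decls[BUILT_IN_SPRINTF],
                            2, dest, fmt);
}
#endif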
11081 /* Validate a single argument ARG against a tree code CODE representing
11082 a type. */
11084 static bool
11085 validate_arg (const_tree arg, enum tree_code code)
11087 if (!arg)
11088 return false;
11089 else if (code == POINTER_TYPE)
11090 return POINTER_TYPE_P (TREE_TYPE (arg));
11091 else if (code == INTEGER_TYPE)
11092 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11093 return code == TREE_CODE (TREE_TYPE (arg));
11096 /* This function validates the types of a function call argument list
11097 against a specified list of tree_codes. If the last specifier is a 0,
11098 that represents an ellipsis, otherwise the last specifier must be a

11099 VOID_TYPE.
11101 This is the GIMPLE version of validate_arglist. Eventually we want to
11102 completely convert builtins.c to work from GIMPLEs and the tree based
11103 validate_arglist will then be removed. */
11105 bool
11106 validate_gimple_arglist (const_gimple call, ...)
11108 enum tree_code code;
11109 bool res = 0;
11110 va_list ap;
11111 const_tree arg;
11112 size_t i;
11114 va_start (ap, call);
11115 i = 0;
11119 code = va_arg (ap, enum tree_code);
11120 switch (code)
11122 case 0:
11123 /* This signifies an ellipsis; any further arguments are all OK. */
11124 res = true;
11125 goto end;
11126 case VOID_TYPE:
11127 /* This signifies an endlink; if no arguments remain, return
11128 true, otherwise return false. */
11129 res = (i == gimple_call_num_args (call));
11130 goto end;
11131 default:
11132 /* If no parameters remain or the parameter's code does not
11133 match the specified code, return false. Otherwise continue
11134 checking any remaining arguments. */
11135 arg = gimple_call_arg (call, i++);
11136 if (!validate_arg (arg, code))
11137 goto end;
11138 break;
11141 while (1);
11143 /* We need gotos here since we can only have one VA_CLOSE in a
11144 function. */
11145 end: ;
11146 va_end (ap);
11148 return res;
11151 /* This function validates the types of a function call argument list
11152 against a specified list of tree_codes. If the last specifier is a 0,
11153 that represents an ellipsis, otherwise the last specifier must be a
11154 VOID_TYPE. */
11156 bool
11157 validate_arglist (const_tree callexpr, ...)
11159 enum tree_code code;
11160 bool res = 0;
11161 va_list ap;
11162 const_call_expr_arg_iterator iter;
11163 const_tree arg;
11165 va_start (ap, callexpr);
11166 init_const_call_expr_arg_iterator (callexpr, &iter);
11170 code = va_arg (ap, enum tree_code);
11171 switch (code)
11173 case 0:
11174 /* This signifies an ellipsis; any further arguments are all OK. */
11175 res = true;
11176 goto end;
11177 case VOID_TYPE:
11178 /* This signifies an endlink; if no arguments remain, return
11179 true, otherwise return false. */
11180 res = !more_const_call_expr_args_p (&iter);
11181 goto end;
11182 default:
11183 /* If no parameters remain or the parameter's code does not
11184 match the specified code, return false. Otherwise continue
11185 checking any remaining arguments. */
11186 arg = next_const_call_expr_arg (&iter);
11187 if (!validate_arg (arg, code))
11188 goto end;
11189 break;
11192 while (1);
11194 /* We need gotos here since we can only have one VA_CLOSE in a
11195 function. */
11196 end: ;
11197 va_end (ap);
11199 return res;
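/* Illustrative sketch (editorial addition, hypothetical helper name):
   callers pass the expected tree codes terminated by VOID_TYPE, or by 0
   when trailing arguments are unconstrained, as the expanders later in
   this file do.  */
#if 0
static rtx
example_check_args (tree exp)
{
  /* __builtin_object_size (ptr, type): a pointer, then an integer,
     and nothing after that.  */
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  /* A trailing 0 instead of VOID_TYPE would accept arbitrary further
     arguments, per the ellipsis case handled above.  */
  return const0_rtx;
}
#endif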
11202 /* Default target-specific builtin expander that does nothing. */
11205 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11206 rtx target ATTRIBUTE_UNUSED,
11207 rtx subtarget ATTRIBUTE_UNUSED,
11208 enum machine_mode mode ATTRIBUTE_UNUSED,
11209 int ignore ATTRIBUTE_UNUSED)
11211 return NULL_RTX;
11214 /* Returns true if EXP represents data that would potentially reside
11215 in a readonly section. */
11217 static bool
11218 readonly_data_expr (tree exp)
11220 STRIP_NOPS (exp);
11222 if (TREE_CODE (exp) != ADDR_EXPR)
11223 return false;
11225 exp = get_base_address (TREE_OPERAND (exp, 0));
11226 if (!exp)
11227 return false;
11229 /* Make sure we call decl_readonly_section only for trees it
11230 can handle (since it returns true for everything it doesn't
11231 understand). */
11232 if (TREE_CODE (exp) == STRING_CST
11233 || TREE_CODE (exp) == CONSTRUCTOR
11234 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11235 return decl_readonly_section (exp, 0);
11236 else
11237 return false;
11240 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11241 to the call, and TYPE is its return type.
11243 Return NULL_TREE if no simplification was possible, otherwise return the
11244 simplified form of the call as a tree.
11246 The simplified form may be a constant or other expression which
11247 computes the same value, but in a more efficient manner (including
11248 calls to other builtin functions).
11250 The call may contain arguments which need to be evaluated, but
11251 which are not useful to determine the result of the call. In
11252 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11253 COMPOUND_EXPR will be an argument which must be evaluated.
11254 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11255 COMPOUND_EXPR in the chain will contain the tree for the simplified
11256 form of the builtin function call. */
11258 static tree
11259 fold_builtin_strstr (tree s1, tree s2, tree type)
11261 if (!validate_arg (s1, POINTER_TYPE)
11262 || !validate_arg (s2, POINTER_TYPE))
11263 return NULL_TREE;
11264 else
11266 tree fn;
11267 const char *p1, *p2;
11269 p2 = c_getstr (s2);
11270 if (p2 == NULL)
11271 return NULL_TREE;
11273 p1 = c_getstr (s1);
11274 if (p1 != NULL)
11276 const char *r = strstr (p1, p2);
11277 tree tem;
11279 if (r == NULL)
11280 return build_int_cst (TREE_TYPE (s1), 0);
11282 /* Return an offset into the constant string argument. */
11283 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11284 s1, size_int (r - p1));
11285 return fold_convert (type, tem);
11288 /* The argument is const char *, and the result is char *, so we need
11289 a type conversion here to avoid a warning. */
11290 if (p2[0] == '\0')
11291 return fold_convert (type, s1);
11293 if (p2[1] != '\0')
11294 return NULL_TREE;
11296 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11297 if (!fn)
11298 return NULL_TREE;
11300 /* New argument list transforming strstr(s1, s2) to
11301 strchr(s1, s2[0]). */
11302 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
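/* Illustrative example (editorial addition, hypothetical user code): at the
   source level the fold above turns a strstr search for a single character
   into strchr, and evaluates fully constant searches at compile time.  */
#if 0
#include <string.h>

char *
find_dot (const char *path)
{
  /* The needle is the one-character literal ".", so this becomes
     strchr (path, '.').  */
  return strstr (path, ".");
}

const char *
constant_case (void)
{
  /* Both arguments are constants: folded to an offset into the literal.  */
  return strstr ("hello world", "world");
}
#endif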
11306 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11307 the call, and TYPE is its return type.
11309 Return NULL_TREE if no simplification was possible, otherwise return the
11310 simplified form of the call as a tree.
11312 The simplified form may be a constant or other expression which
11313 computes the same value, but in a more efficient manner (including
11314 calls to other builtin functions).
11316 The call may contain arguments which need to be evaluated, but
11317 which are not useful to determine the result of the call. In
11318 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11319 COMPOUND_EXPR will be an argument which must be evaluated.
11320 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11321 COMPOUND_EXPR in the chain will contain the tree for the simplified
11322 form of the builtin function call. */
11324 static tree
11325 fold_builtin_strchr (tree s1, tree s2, tree type)
11327 if (!validate_arg (s1, POINTER_TYPE)
11328 || !validate_arg (s2, INTEGER_TYPE))
11329 return NULL_TREE;
11330 else
11332 const char *p1;
11334 if (TREE_CODE (s2) != INTEGER_CST)
11335 return NULL_TREE;
11337 p1 = c_getstr (s1);
11338 if (p1 != NULL)
11340 char c;
11341 const char *r;
11342 tree tem;
11344 if (target_char_cast (s2, &c))
11345 return NULL_TREE;
11347 r = strchr (p1, c);
11349 if (r == NULL)
11350 return build_int_cst (TREE_TYPE (s1), 0);
11352 /* Return an offset into the constant string argument. */
11353 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11354 s1, size_int (r - p1));
11355 return fold_convert (type, tem);
11357 return NULL_TREE;
11361 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11362 the call, and TYPE is its return type.
11364 Return NULL_TREE if no simplification was possible, otherwise return the
11365 simplified form of the call as a tree.
11367 The simplified form may be a constant or other expression which
11368 computes the same value, but in a more efficient manner (including
11369 calls to other builtin functions).
11371 The call may contain arguments which need to be evaluated, but
11372 which are not useful to determine the result of the call. In
11373 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11374 COMPOUND_EXPR will be an argument which must be evaluated.
11375 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11376 COMPOUND_EXPR in the chain will contain the tree for the simplified
11377 form of the builtin function call. */
11379 static tree
11380 fold_builtin_strrchr (tree s1, tree s2, tree type)
11382 if (!validate_arg (s1, POINTER_TYPE)
11383 || !validate_arg (s2, INTEGER_TYPE))
11384 return NULL_TREE;
11385 else
11387 tree fn;
11388 const char *p1;
11390 if (TREE_CODE (s2) != INTEGER_CST)
11391 return NULL_TREE;
11393 p1 = c_getstr (s1);
11394 if (p1 != NULL)
11396 char c;
11397 const char *r;
11398 tree tem;
11400 if (target_char_cast (s2, &c))
11401 return NULL_TREE;
11403 r = strrchr (p1, c);
11405 if (r == NULL)
11406 return build_int_cst (TREE_TYPE (s1), 0);
11408 /* Return an offset into the constant string argument. */
11409 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11410 s1, size_int (r - p1));
11411 return fold_convert (type, tem);
11414 if (! integer_zerop (s2))
11415 return NULL_TREE;
11417 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11418 if (!fn)
11419 return NULL_TREE;
11421 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11422 return build_call_expr (fn, 2, s1, s2);
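/* Illustrative example (editorial addition, hypothetical user code):
   searching for the terminating NUL from either end lands on the same
   position, so the fold above rewrites strrchr into the usually cheaper
   strchr.  */
#if 0
#include <string.h>

char *
end_of (char *s)
{
  /* Folded to strchr (s, '\0'), i.e. a pointer to the terminating NUL.  */
  return strrchr (s, '\0');
}
#endif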
11426 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11427 to the call, and TYPE is its return type.
11429 Return NULL_TREE if no simplification was possible, otherwise return the
11430 simplified form of the call as a tree.
11432 The simplified form may be a constant or other expression which
11433 computes the same value, but in a more efficient manner (including
11434 calls to other builtin functions).
11436 The call may contain arguments which need to be evaluated, but
11437 which are not useful to determine the result of the call. In
11438 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11439 COMPOUND_EXPR will be an argument which must be evaluated.
11440 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11441 COMPOUND_EXPR in the chain will contain the tree for the simplified
11442 form of the builtin function call. */
11444 static tree
11445 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11447 if (!validate_arg (s1, POINTER_TYPE)
11448 || !validate_arg (s2, POINTER_TYPE))
11449 return NULL_TREE;
11450 else
11452 tree fn;
11453 const char *p1, *p2;
11455 p2 = c_getstr (s2);
11456 if (p2 == NULL)
11457 return NULL_TREE;
11459 p1 = c_getstr (s1);
11460 if (p1 != NULL)
11462 const char *r = strpbrk (p1, p2);
11463 tree tem;
11465 if (r == NULL)
11466 return build_int_cst (TREE_TYPE (s1), 0);
11468 /* Return an offset into the constant string argument. */
11469 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11470 s1, size_int (r - p1));
11471 return fold_convert (type, tem);
11474 if (p2[0] == '\0')
11475 /* strpbrk(x, "") == NULL.
11476 Evaluate and ignore s1 in case it had side-effects. */
11477 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11479 if (p2[1] != '\0')
11480 return NULL_TREE; /* Really call strpbrk. */
11482 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11483 if (!fn)
11484 return NULL_TREE;
11486 /* New argument list transforming strpbrk(s1, s2) to
11487 strchr(s1, s2[0]). */
11488 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11492 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11493 to the call.
11495 Return NULL_TREE if no simplification was possible, otherwise return the
11496 simplified form of the call as a tree.
11498 The simplified form may be a constant or other expression which
11499 computes the same value, but in a more efficient manner (including
11500 calls to other builtin functions).
11502 The call may contain arguments which need to be evaluated, but
11503 which are not useful to determine the result of the call. In
11504 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11505 COMPOUND_EXPR will be an argument which must be evaluated.
11506 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11507 COMPOUND_EXPR in the chain will contain the tree for the simplified
11508 form of the builtin function call. */
11510 static tree
11511 fold_builtin_strcat (tree dst, tree src)
11513 if (!validate_arg (dst, POINTER_TYPE)
11514 || !validate_arg (src, POINTER_TYPE))
11515 return NULL_TREE;
11516 else
11518 const char *p = c_getstr (src);
11520 /* If the string length is zero, return the dst parameter. */
11521 if (p && *p == '\0')
11522 return dst;
11524 return NULL_TREE;
11528 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11529 arguments to the call.
11531 Return NULL_TREE if no simplification was possible, otherwise return the
11532 simplified form of the call as a tree.
11534 The simplified form may be a constant or other expression which
11535 computes the same value, but in a more efficient manner (including
11536 calls to other builtin functions).
11538 The call may contain arguments which need to be evaluated, but
11539 which are not useful to determine the result of the call. In
11540 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11541 COMPOUND_EXPR will be an argument which must be evaluated.
11542 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11543 COMPOUND_EXPR in the chain will contain the tree for the simplified
11544 form of the builtin function call. */
11546 static tree
11547 fold_builtin_strncat (tree dst, tree src, tree len)
11549 if (!validate_arg (dst, POINTER_TYPE)
11550 || !validate_arg (src, POINTER_TYPE)
11551 || !validate_arg (len, INTEGER_TYPE))
11552 return NULL_TREE;
11553 else
11555 const char *p = c_getstr (src);
11557 /* If the requested length is zero, or the src parameter string
11558 length is zero, return the dst parameter. */
11559 if (integer_zerop (len) || (p && *p == '\0'))
11560 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11562 /* If the requested len is greater than or equal to the string
11563 length, call strcat. */
11564 if (TREE_CODE (len) == INTEGER_CST && p
11565 && compare_tree_int (len, strlen (p)) >= 0)
11567 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11569 /* If the replacement _DECL isn't initialized, don't do the
11570 transformation. */
11571 if (!fn)
11572 return NULL_TREE;
11574 return build_call_expr (fn, 2, dst, src);
11576 return NULL_TREE;
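/* Illustrative example (editorial addition, hypothetical user code): the
   strncat fold above removes calls whose length bound or source string is
   empty and relaxes generously bounded calls to plain strcat.  */
#if 0
#include <string.h>

void
append (char *dst, const char *src)
{
  strncat (dst, src, 0);     /* Length is 0: folds to dst, no copy done.      */
  strncat (dst, "abc", 16);  /* Bound >= strlen ("abc"): strcat (dst, "abc"). */
}
#endif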
11580 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11581 to the call.
11583 Return NULL_TREE if no simplification was possible, otherwise return the
11584 simplified form of the call as a tree.
11586 The simplified form may be a constant or other expression which
11587 computes the same value, but in a more efficient manner (including
11588 calls to other builtin functions).
11590 The call may contain arguments which need to be evaluated, but
11591 which are not useful to determine the result of the call. In
11592 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11593 COMPOUND_EXPR will be an argument which must be evaluated.
11594 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11595 COMPOUND_EXPR in the chain will contain the tree for the simplified
11596 form of the builtin function call. */
11598 static tree
11599 fold_builtin_strspn (tree s1, tree s2)
11601 if (!validate_arg (s1, POINTER_TYPE)
11602 || !validate_arg (s2, POINTER_TYPE))
11603 return NULL_TREE;
11604 else
11606 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11608 /* If both arguments are constants, evaluate at compile-time. */
11609 if (p1 && p2)
11611 const size_t r = strspn (p1, p2);
11612 return size_int (r);
11615 /* If either argument is "", return NULL_TREE. */
11616 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11617 /* Evaluate and ignore both arguments in case either one has
11618 side-effects. */
11619 return omit_two_operands (size_type_node, size_zero_node,
11620 s1, s2);
11621 return NULL_TREE;
11625 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11626 to the call.
11628 Return NULL_TREE if no simplification was possible, otherwise return the
11629 simplified form of the call as a tree.
11631 The simplified form may be a constant or other expression which
11632 computes the same value, but in a more efficient manner (including
11633 calls to other builtin functions).
11635 The call may contain arguments which need to be evaluated, but
11636 which are not useful to determine the result of the call. In
11637 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11638 COMPOUND_EXPR will be an argument which must be evaluated.
11639 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11640 COMPOUND_EXPR in the chain will contain the tree for the simplified
11641 form of the builtin function call. */
11643 static tree
11644 fold_builtin_strcspn (tree s1, tree s2)
11646 if (!validate_arg (s1, POINTER_TYPE)
11647 || !validate_arg (s2, POINTER_TYPE))
11648 return NULL_TREE;
11649 else
11651 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11653 /* If both arguments are constants, evaluate at compile-time. */
11654 if (p1 && p2)
11656 const size_t r = strcspn (p1, p2);
11657 return size_int (r);
11660 /* If the first argument is "", return NULL_TREE. */
11661 if (p1 && *p1 == '\0')
11663 /* Evaluate and ignore argument s2 in case it has
11664 side-effects. */
11665 return omit_one_operand (size_type_node,
11666 size_zero_node, s2);
11669 /* If the second argument is "", return __builtin_strlen(s1). */
11670 if (p2 && *p2 == '\0')
11672 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11674 /* If the replacement _DECL isn't initialized, don't do the
11675 transformation. */
11676 if (!fn)
11677 return NULL_TREE;
11679 return build_call_expr (fn, 1, s1);
11681 return NULL_TREE;
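/* Illustrative example (editorial addition, hypothetical user code):
   strspn/strcspn with constant arguments are evaluated at compile time, and
   strcspn against an empty reject set is just the string length.  */
#if 0
#include <string.h>

size_t
spans (const char *s)
{
  size_t a = strspn ("aab", "ab");  /* Both constant: folds to 3.         */
  size_t b = strcspn ("abc", "c");  /* Both constant: folds to 2.         */
  size_t c = strcspn (s, "");       /* Empty reject set: strlen (s).      */
  return a + b + c;
}
#endif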
11685 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11686 to the call. IGNORE is true if the value returned
11687 by the builtin will be ignored. UNLOCKED is true if this is
11688 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11689 the known length of the string. Return NULL_TREE if no simplification
11690 was possible. */
11692 tree
11693 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11695 /* If we're using an unlocked function, assume the other unlocked
11696 functions exist explicitly. */
11697 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11698 : implicit_built_in_decls[BUILT_IN_FPUTC];
11699 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11700 : implicit_built_in_decls[BUILT_IN_FWRITE];
11702 /* If the return value is used, don't do the transformation. */
11703 if (!ignore)
11704 return NULL_TREE;
11706 /* Verify the arguments in the original call. */
11707 if (!validate_arg (arg0, POINTER_TYPE)
11708 || !validate_arg (arg1, POINTER_TYPE))
11709 return NULL_TREE;
11711 if (! len)
11712 len = c_strlen (arg0, 0);
11714 /* Get the length of the string passed to fputs. If the length
11715 can't be determined, punt. */
11716 if (!len
11717 || TREE_CODE (len) != INTEGER_CST)
11718 return NULL_TREE;
11720 switch (compare_tree_int (len, 1))
11722 case -1: /* length is 0, delete the call entirely. */
11723 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11725 case 0: /* length is 1, call fputc. */
11727 const char *p = c_getstr (arg0);
11729 if (p != NULL)
11731 if (fn_fputc)
11732 return build_call_expr (fn_fputc, 2,
11733 build_int_cst (NULL_TREE, p[0]), arg1);
11734 else
11735 return NULL_TREE;
11738 /* FALLTHROUGH */
11739 case 1: /* length is greater than 1, call fwrite. */
11741 /* If optimizing for size keep fputs. */
11742 if (optimize_function_for_size_p (cfun))
11743 return NULL_TREE;
11744 /* New argument list transforming fputs(string, stream) to
11745 fwrite(string, 1, len, stream). */
11746 if (fn_fwrite)
11747 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11748 else
11749 return NULL_TREE;
11751 default:
11752 gcc_unreachable ();
11754 return NULL_TREE;
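/* Illustrative example (editorial addition, hypothetical user code): when
   the result of fputs is unused and the string length is known, the fold
   above removes the call, narrows it to fputc, or widens it to fwrite.  */
#if 0
#include <stdio.h>

void
log_bits (FILE *fp)
{
  fputs ("", fp);      /* Length 0: the call is deleted entirely.          */
  fputs ("\n", fp);    /* Length 1: becomes fputc ('\n', fp).              */
  fputs ("done", fp);  /* Length > 1: becomes fwrite ("done", 1, 4, fp),
                          unless optimizing for size.                      */
}
#endif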
11757 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11758 produced, false otherwise. This is done so that we don't output the error
11759 or warning twice or three times. */
11761 bool
11762 fold_builtin_next_arg (tree exp, bool va_start_p)
11764 tree fntype = TREE_TYPE (current_function_decl);
11765 int nargs = call_expr_nargs (exp);
11766 tree arg;
11768 if (TYPE_ARG_TYPES (fntype) == 0
11769 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11770 == void_type_node))
11772 error ("%<va_start%> used in function with fixed args");
11773 return true;
11776 if (va_start_p)
11778 if (va_start_p && (nargs != 2))
11780 error ("wrong number of arguments to function %<va_start%>");
11781 return true;
11783 arg = CALL_EXPR_ARG (exp, 1);
11785 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11786 when we checked the arguments and if needed issued a warning. */
11787 else
11789 if (nargs == 0)
11791 /* Evidently an out of date version of <stdarg.h>; can't validate
11792 va_start's second argument, but can still work as intended. */
11793 warning (0, "%<__builtin_next_arg%> called without an argument");
11794 return true;
11796 else if (nargs > 1)
11798 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11799 return true;
11801 arg = CALL_EXPR_ARG (exp, 0);
11804 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11805 or __builtin_next_arg (0) the first time we see it, after checking
11806 the arguments and if needed issuing a warning. */
11807 if (!integer_zerop (arg))
11809 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11811 /* Strip off all nops for the sake of the comparison. This
11812 is not quite the same as STRIP_NOPS. It does more.
11813 We must also strip off INDIRECT_EXPR for C++ reference
11814 parameters. */
11815 while (CONVERT_EXPR_P (arg)
11816 || TREE_CODE (arg) == INDIRECT_REF)
11817 arg = TREE_OPERAND (arg, 0);
11818 if (arg != last_parm)
11820 /* FIXME: Sometimes with the tree optimizers we can get an
11821 argument that is not the last one even though the user used the last
11822 argument. We just warn and set the arg to be the last
11823 argument so that we will get wrong-code because of
11824 it. */
11825 warning (0, "second parameter of %<va_start%> not last named argument");
11828 /* Undefined by C99 7.15.1.4p4 (va_start):
11829 "If the parameter parmN is declared with the register storage
11830 class, with a function or array type, or with a type that is
11831 not compatible with the type that results after application of
11832 the default argument promotions, the behavior is undefined."
11834 else if (DECL_REGISTER (arg))
11835 warning (0, "undefined behaviour when second parameter of "
11836 "%<va_start%> is declared with %<register%> storage");
11838 /* We want to verify the second parameter just once before the tree
11839 optimizers are run and then avoid keeping it in the tree,
11840 as otherwise we could warn even for correct code like:
11841 void foo (int i, ...)
11842 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11843 if (va_start_p)
11844 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11845 else
11846 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11848 return false;
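/* Illustrative example (editorial addition, hypothetical user code): the
   checks above reject va_start in a function with a fixed argument list and
   warn when its second argument is not the last named parameter.  */
#if 0
#include <stdarg.h>

void
bad_fixed (int a)
{
  va_list ap;
  va_start (ap, a);  /* error: va_start used in function with fixed args.  */
  va_end (ap);
}

void
bad_parm (int a, int b, ...)
{
  va_list ap;
  va_start (ap, a);  /* warning: second parameter of va_start not last
                        named argument.  */
  va_end (ap);
}
#endif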
11852 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11853 ORIG may be null if this is a 2-argument call. We don't attempt to
11854 simplify calls with more than 3 arguments.
11856 Return NULL_TREE if no simplification was possible, otherwise return the
11857 simplified form of the call as a tree. If IGNORED is true, it means that
11858 the caller does not use the returned value of the function. */
11860 static tree
11861 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11863 tree call, retval;
11864 const char *fmt_str = NULL;
11866 /* Verify the required arguments in the original call. We deal with two
11867 types of sprintf() calls: 'sprintf (str, fmt)' and
11868 'sprintf (dest, "%s", orig)'. */
11869 if (!validate_arg (dest, POINTER_TYPE)
11870 || !validate_arg (fmt, POINTER_TYPE))
11871 return NULL_TREE;
11872 if (orig && !validate_arg (orig, POINTER_TYPE))
11873 return NULL_TREE;
11875 /* Check whether the format is a literal string constant. */
11876 fmt_str = c_getstr (fmt);
11877 if (fmt_str == NULL)
11878 return NULL_TREE;
11880 call = NULL_TREE;
11881 retval = NULL_TREE;
11883 if (!init_target_chars ())
11884 return NULL_TREE;
11886 /* If the format doesn't contain % args or %%, use strcpy. */
11887 if (strchr (fmt_str, target_percent) == NULL)
11889 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11891 if (!fn)
11892 return NULL_TREE;
11894 /* Don't optimize sprintf (buf, "abc", ptr++). */
11895 if (orig)
11896 return NULL_TREE;
11898 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11899 'format' is known to contain no % formats. */
11900 call = build_call_expr (fn, 2, dest, fmt);
11901 if (!ignored)
11902 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11905 /* If the format is "%s", use strcpy if the result isn't used. */
11906 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11908 tree fn;
11909 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11911 if (!fn)
11912 return NULL_TREE;
11914 /* Don't crash on sprintf (str1, "%s"). */
11915 if (!orig)
11916 return NULL_TREE;
11918 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11919 if (!ignored)
11921 retval = c_strlen (orig, 1);
11922 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11923 return NULL_TREE;
11925 call = build_call_expr (fn, 2, dest, orig);
11928 if (call && retval)
11930 retval = fold_convert
11931 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11932 retval);
11933 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11935 else
11936 return call;
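/* Illustrative example (editorial addition, hypothetical user code): the
   sprintf fold above degrades to strcpy when the format contains no
   conversions or is exactly "%s", substituting the known length for the
   return value when that value is used.  */
#if 0
#include <stdio.h>

int
emit (char *buf, const char *name)
{
  sprintf (buf, "hello");          /* -> strcpy (buf, "hello"), result 5.   */
  sprintf (buf, "%s", name);       /* Result unused -> strcpy (buf, name).  */
  return sprintf (buf, "header");  /* -> strcpy plus constant return of 6.  */
}
#endif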
11939 /* Expand a call EXP to __builtin_object_size. */
11942 expand_builtin_object_size (tree exp)
11944 tree ost;
11945 int object_size_type;
11946 tree fndecl = get_callee_fndecl (exp);
11948 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11950 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11951 exp, fndecl);
11952 expand_builtin_trap ();
11953 return const0_rtx;
11956 ost = CALL_EXPR_ARG (exp, 1);
11957 STRIP_NOPS (ost);
11959 if (TREE_CODE (ost) != INTEGER_CST
11960 || tree_int_cst_sgn (ost) < 0
11961 || compare_tree_int (ost, 3) > 0)
11963 error ("%Klast argument of %D is not integer constant between 0 and 3",
11964 exp, fndecl);
11965 expand_builtin_trap ();
11966 return const0_rtx;
11969 object_size_type = tree_low_cst (ost, 0);
11971 return object_size_type < 2 ? constm1_rtx : const0_rtx;
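/* Illustrative example (editorial addition, hypothetical user code): when
   the object size cannot be determined, types 0 and 1 report "unknown" as
   (size_t) -1 while types 2 and 3 report 0, which is what the fallback
   above expands to.  */
#if 0
#include <stddef.h>

size_t
sizes (char *unknown)
{
  char buf[16];
  size_t a = __builtin_object_size (buf, 0);      /* 16 once the size is
                                                     statically known.     */
  size_t b = __builtin_object_size (unknown, 0);  /* (size_t) -1: unknown,
                                                     maximum estimate.     */
  size_t c = __builtin_object_size (unknown, 2);  /* 0: unknown, minimum
                                                     estimate.             */
  return a + b + c;
}
#endif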
11974 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11975 FCODE is the BUILT_IN_* to use.
11976 Return NULL_RTX if we failed; the caller should emit a normal call,
11977 otherwise try to get the result in TARGET, if convenient (and in
11978 mode MODE if that's convenient). */
11980 static rtx
11981 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11982 enum built_in_function fcode)
11984 tree dest, src, len, size;
11986 if (!validate_arglist (exp,
11987 POINTER_TYPE,
11988 fcode == BUILT_IN_MEMSET_CHK
11989 ? INTEGER_TYPE : POINTER_TYPE,
11990 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11991 return NULL_RTX;
11993 dest = CALL_EXPR_ARG (exp, 0);
11994 src = CALL_EXPR_ARG (exp, 1);
11995 len = CALL_EXPR_ARG (exp, 2);
11996 size = CALL_EXPR_ARG (exp, 3);
11998 if (! host_integerp (size, 1))
11999 return NULL_RTX;
12001 if (host_integerp (len, 1) || integer_all_onesp (size))
12003 tree fn;
12005 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12007 warning_at (tree_nonartificial_location (exp),
12008 0, "%Kcall to %D will always overflow destination buffer",
12009 exp, get_callee_fndecl (exp));
12010 return NULL_RTX;
12013 fn = NULL_TREE;
12014 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12015 mem{cpy,pcpy,move,set} is available. */
12016 switch (fcode)
12018 case BUILT_IN_MEMCPY_CHK:
12019 fn = built_in_decls[BUILT_IN_MEMCPY];
12020 break;
12021 case BUILT_IN_MEMPCPY_CHK:
12022 fn = built_in_decls[BUILT_IN_MEMPCPY];
12023 break;
12024 case BUILT_IN_MEMMOVE_CHK:
12025 fn = built_in_decls[BUILT_IN_MEMMOVE];
12026 break;
12027 case BUILT_IN_MEMSET_CHK:
12028 fn = built_in_decls[BUILT_IN_MEMSET];
12029 break;
12030 default:
12031 break;
12034 if (! fn)
12035 return NULL_RTX;
12037 fn = build_call_expr (fn, 3, dest, src, len);
12038 STRIP_TYPE_NOPS (fn);
12039 while (TREE_CODE (fn) == COMPOUND_EXPR)
12041 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12042 EXPAND_NORMAL);
12043 fn = TREE_OPERAND (fn, 1);
12045 if (TREE_CODE (fn) == CALL_EXPR)
12046 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12047 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12049 else if (fcode == BUILT_IN_MEMSET_CHK)
12050 return NULL_RTX;
12051 else
12053 unsigned int dest_align
12054 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12056 /* If DEST is not a pointer type, call the normal function. */
12057 if (dest_align == 0)
12058 return NULL_RTX;
12060 /* If SRC and DEST are the same (and not volatile), do nothing. */
12061 if (operand_equal_p (src, dest, 0))
12063 tree expr;
12065 if (fcode != BUILT_IN_MEMPCPY_CHK)
12067 /* Evaluate and ignore LEN in case it has side-effects. */
12068 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12069 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12072 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12073 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12076 /* __memmove_chk special case. */
12077 if (fcode == BUILT_IN_MEMMOVE_CHK)
12079 unsigned int src_align
12080 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12082 if (src_align == 0)
12083 return NULL_RTX;
12085 /* If src is categorized for a readonly section we can use
12086 normal __memcpy_chk. */
12087 if (readonly_data_expr (src))
12089 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12090 if (!fn)
12091 return NULL_RTX;
12092 fn = build_call_expr (fn, 4, dest, src, len, size);
12093 STRIP_TYPE_NOPS (fn);
12094 while (TREE_CODE (fn) == COMPOUND_EXPR)
12096 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12097 EXPAND_NORMAL);
12098 fn = TREE_OPERAND (fn, 1);
12100 if (TREE_CODE (fn) == CALL_EXPR)
12101 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12102 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12105 return NULL_RTX;
12109 /* Emit warning if a buffer overflow is detected at compile time. */
12111 static void
12112 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12114 int is_strlen = 0;
12115 tree len, size;
12116 location_t loc = tree_nonartificial_location (exp);
12118 switch (fcode)
12120 case BUILT_IN_STRCPY_CHK:
12121 case BUILT_IN_STPCPY_CHK:
12122 /* For __strcat_chk the warning will be emitted only if overflowing
12123 by at least strlen (dest) + 1 bytes. */
12124 case BUILT_IN_STRCAT_CHK:
12125 len = CALL_EXPR_ARG (exp, 1);
12126 size = CALL_EXPR_ARG (exp, 2);
12127 is_strlen = 1;
12128 break;
12129 case BUILT_IN_STRNCAT_CHK:
12130 case BUILT_IN_STRNCPY_CHK:
12131 len = CALL_EXPR_ARG (exp, 2);
12132 size = CALL_EXPR_ARG (exp, 3);
12133 break;
12134 case BUILT_IN_SNPRINTF_CHK:
12135 case BUILT_IN_VSNPRINTF_CHK:
12136 len = CALL_EXPR_ARG (exp, 1);
12137 size = CALL_EXPR_ARG (exp, 3);
12138 break;
12139 default:
12140 gcc_unreachable ();
12143 if (!len || !size)
12144 return;
12146 if (! host_integerp (size, 1) || integer_all_onesp (size))
12147 return;
12149 if (is_strlen)
12151 len = c_strlen (len, 1);
12152 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12153 return;
12155 else if (fcode == BUILT_IN_STRNCAT_CHK)
12157 tree src = CALL_EXPR_ARG (exp, 1);
12158 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12159 return;
12160 src = c_strlen (src, 1);
12161 if (! src || ! host_integerp (src, 1))
12163 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12164 exp, get_callee_fndecl (exp));
12165 return;
12167 else if (tree_int_cst_lt (src, size))
12168 return;
12170 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12171 return;
12173 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12174 exp, get_callee_fndecl (exp));
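/* Illustrative example (editorial addition, hypothetical user code): the
   warning above fires when a fortified call is statically known to
   overflow, e.g. when glibc's _FORTIFY_SOURCE maps strcpy onto
   __strcpy_chk.  */
#if 0
#include <string.h>

void
overflow (void)
{
  char buf[4];
  /* With fortification enabled this becomes a __strcpy_chk call with an
     object size of 4 and triggers "call to ... will always overflow
     destination buffer".  */
  strcpy (buf, "longer than four");
}
#endif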
12177 /* Emit warning if a buffer overflow is detected at compile time
12178 in __sprintf_chk/__vsprintf_chk calls. */
12180 static void
12181 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12183 tree dest, size, len, fmt, flag;
12184 const char *fmt_str;
12185 int nargs = call_expr_nargs (exp);
12187 /* Verify the required arguments in the original call. */
12189 if (nargs < 4)
12190 return;
12191 dest = CALL_EXPR_ARG (exp, 0);
12192 flag = CALL_EXPR_ARG (exp, 1);
12193 size = CALL_EXPR_ARG (exp, 2);
12194 fmt = CALL_EXPR_ARG (exp, 3);
12196 if (! host_integerp (size, 1) || integer_all_onesp (size))
12197 return;
12199 /* Check whether the format is a literal string constant. */
12200 fmt_str = c_getstr (fmt);
12201 if (fmt_str == NULL)
12202 return;
12204 if (!init_target_chars ())
12205 return;
12207 /* If the format doesn't contain % args or %%, we know its size. */
12208 if (strchr (fmt_str, target_percent) == 0)
12209 len = build_int_cstu (size_type_node, strlen (fmt_str));
12210 /* If the format is "%s" and first ... argument is a string literal,
12211 we know it too. */
12212 else if (fcode == BUILT_IN_SPRINTF_CHK
12213 && strcmp (fmt_str, target_percent_s) == 0)
12215 tree arg;
12217 if (nargs < 5)
12218 return;
12219 arg = CALL_EXPR_ARG (exp, 4);
12220 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12221 return;
12223 len = c_strlen (arg, 1);
12224 if (!len || ! host_integerp (len, 1))
12225 return;
12227 else
12228 return;
12230 if (! tree_int_cst_lt (len, size))
12231 warning_at (tree_nonartificial_location (exp),
12232 0, "%Kcall to %D will always overflow destination buffer",
12233 exp, get_callee_fndecl (exp));
12236 /* Emit warning if a free is called with address of a variable. */
12238 static void
12239 maybe_emit_free_warning (tree exp)
12241 tree arg = CALL_EXPR_ARG (exp, 0);
12243 STRIP_NOPS (arg);
12244 if (TREE_CODE (arg) != ADDR_EXPR)
12245 return;
12247 arg = get_base_address (TREE_OPERAND (arg, 0));
12248 if (arg == NULL || INDIRECT_REF_P (arg))
12249 return;
12251 if (SSA_VAR_P (arg))
12252 warning_at (tree_nonartificial_location (exp),
12253 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12254 else
12255 warning_at (tree_nonartificial_location (exp),
12256 0, "%Kattempt to free a non-heap object", exp);
12259 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12260 if possible. */
12262 tree
12263 fold_builtin_object_size (tree ptr, tree ost)
12265 tree ret = NULL_TREE;
12266 int object_size_type;
12268 if (!validate_arg (ptr, POINTER_TYPE)
12269 || !validate_arg (ost, INTEGER_TYPE))
12270 return NULL_TREE;
12272 STRIP_NOPS (ost);
12274 if (TREE_CODE (ost) != INTEGER_CST
12275 || tree_int_cst_sgn (ost) < 0
12276 || compare_tree_int (ost, 3) > 0)
12277 return NULL_TREE;
12279 object_size_type = tree_low_cst (ost, 0);
12281 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12282 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12283 and (size_t) 0 for types 2 and 3. */
12284 if (TREE_SIDE_EFFECTS (ptr))
12285 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12287 if (TREE_CODE (ptr) == ADDR_EXPR)
12288 ret = build_int_cstu (size_type_node,
12289 compute_builtin_object_size (ptr, object_size_type));
12291 else if (TREE_CODE (ptr) == SSA_NAME)
12293 unsigned HOST_WIDE_INT bytes;
12295 /* If object size is not known yet, delay folding until
12296 later. Maybe subsequent passes will help determining
12297 it. */
12298 bytes = compute_builtin_object_size (ptr, object_size_type);
12299 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12300 ? -1 : 0))
12301 ret = build_int_cstu (size_type_node, bytes);
12304 if (ret)
12306 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12307 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12308 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12309 ret = NULL_TREE;
12312 return ret;
12315 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12316 DEST, SRC, LEN, and SIZE are the arguments to the call.
12317 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12318 code of the builtin. If MAXLEN is not NULL, it is maximum length
12319 passed as third argument. */
12321 tree
12322 fold_builtin_memory_chk (tree fndecl,
12323 tree dest, tree src, tree len, tree size,
12324 tree maxlen, bool ignore,
12325 enum built_in_function fcode)
12327 tree fn;
12329 if (!validate_arg (dest, POINTER_TYPE)
12330 || !validate_arg (src,
12331 (fcode == BUILT_IN_MEMSET_CHK
12332 ? INTEGER_TYPE : POINTER_TYPE))
12333 || !validate_arg (len, INTEGER_TYPE)
12334 || !validate_arg (size, INTEGER_TYPE))
12335 return NULL_TREE;
12337 /* If SRC and DEST are the same (and not volatile), return DEST
12338 (resp. DEST+LEN for __mempcpy_chk). */
12339 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12341 if (fcode != BUILT_IN_MEMPCPY_CHK)
12342 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12343 else
12345 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12346 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
12350 if (! host_integerp (size, 1))
12351 return NULL_TREE;
12353 if (! integer_all_onesp (size))
12355 if (! host_integerp (len, 1))
12357 /* If LEN is not constant, try MAXLEN too.
12358 For MAXLEN only allow optimizing into non-_ocs function
12359 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12360 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12362 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12364 /* (void) __mempcpy_chk () can be optimized into
12365 (void) __memcpy_chk (). */
12366 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12367 if (!fn)
12368 return NULL_TREE;
12370 return build_call_expr (fn, 4, dest, src, len, size);
12372 return NULL_TREE;
12375 else
12376 maxlen = len;
12378 if (tree_int_cst_lt (size, maxlen))
12379 return NULL_TREE;
12382 fn = NULL_TREE;
12383 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12384 mem{cpy,pcpy,move,set} is available. */
12385 switch (fcode)
12387 case BUILT_IN_MEMCPY_CHK:
12388 fn = built_in_decls[BUILT_IN_MEMCPY];
12389 break;
12390 case BUILT_IN_MEMPCPY_CHK:
12391 fn = built_in_decls[BUILT_IN_MEMPCPY];
12392 break;
12393 case BUILT_IN_MEMMOVE_CHK:
12394 fn = built_in_decls[BUILT_IN_MEMMOVE];
12395 break;
12396 case BUILT_IN_MEMSET_CHK:
12397 fn = built_in_decls[BUILT_IN_MEMSET];
12398 break;
12399 default:
12400 break;
12403 if (!fn)
12404 return NULL_TREE;
12406 return build_call_expr (fn, 3, dest, src, len);
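/* Illustrative example (editorial addition, hypothetical user code): once
   the length is known to fit in the object size, the checked call handled
   above is folded back to its unchecked counterpart.  */
#if 0
#include <string.h>

void
copy (char *dst, const char *src)
{
  char buf[32];
  /* __builtin___memcpy_chk with a constant length of 8 and an object size
     of 32 folds to a plain memcpy (buf, src, 8).  */
  __builtin___memcpy_chk (buf, src, 8, __builtin_object_size (buf, 0));
  memcpy (dst, buf, 8);
}
#endif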
12409 /* Fold a call to the __st[rp]cpy_chk builtin.
12410 DEST, SRC, and SIZE are the arguments to the call.
12411 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12412 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12413 strings passed as second argument. */
12415 tree
12416 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12417 tree maxlen, bool ignore,
12418 enum built_in_function fcode)
12420 tree len, fn;
12422 if (!validate_arg (dest, POINTER_TYPE)
12423 || !validate_arg (src, POINTER_TYPE)
12424 || !validate_arg (size, INTEGER_TYPE))
12425 return NULL_TREE;
12427 /* If SRC and DEST are the same (and not volatile), return DEST. */
12428 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12429 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12431 if (! host_integerp (size, 1))
12432 return NULL_TREE;
12434 if (! integer_all_onesp (size))
12436 len = c_strlen (src, 1);
12437 if (! len || ! host_integerp (len, 1))
12439 /* If LEN is not constant, try MAXLEN too.
12440 For MAXLEN only allow optimizing into non-_ocs function
12441 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12442 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12444 if (fcode == BUILT_IN_STPCPY_CHK)
12446 if (! ignore)
12447 return NULL_TREE;
12449 /* If return value of __stpcpy_chk is ignored,
12450 optimize into __strcpy_chk. */
12451 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12452 if (!fn)
12453 return NULL_TREE;
12455 return build_call_expr (fn, 3, dest, src, size);
12458 if (! len || TREE_SIDE_EFFECTS (len))
12459 return NULL_TREE;
12461 /* If c_strlen returned something, but not a constant,
12462 transform __strcpy_chk into __memcpy_chk. */
12463 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12464 if (!fn)
12465 return NULL_TREE;
12467 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12468 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12469 build_call_expr (fn, 4,
12470 dest, src, len, size));
12473 else
12474 maxlen = len;
12476 if (! tree_int_cst_lt (maxlen, size))
12477 return NULL_TREE;
12480 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12481 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12482 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12483 if (!fn)
12484 return NULL_TREE;
12486 return build_call_expr (fn, 2, dest, src);
12489 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12490 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12491 length passed as third argument. */
12493 tree
12494 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12495 tree maxlen)
12497 tree fn;
12499 if (!validate_arg (dest, POINTER_TYPE)
12500 || !validate_arg (src, POINTER_TYPE)
12501 || !validate_arg (len, INTEGER_TYPE)
12502 || !validate_arg (size, INTEGER_TYPE))
12503 return NULL_TREE;
12505 if (! host_integerp (size, 1))
12506 return NULL_TREE;
12508 if (! integer_all_onesp (size))
12510 if (! host_integerp (len, 1))
12512 /* If LEN is not constant, try MAXLEN too.
12513 For MAXLEN only allow optimizing into non-_ocs function
12514 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12515 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12516 return NULL_TREE;
12518 else
12519 maxlen = len;
12521 if (tree_int_cst_lt (size, maxlen))
12522 return NULL_TREE;
12525 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12526 fn = built_in_decls[BUILT_IN_STRNCPY];
12527 if (!fn)
12528 return NULL_TREE;
12530 return build_call_expr (fn, 3, dest, src, len);
12533 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12534 are the arguments to the call. */
12536 static tree
12537 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12539 tree fn;
12540 const char *p;
12542 if (!validate_arg (dest, POINTER_TYPE)
12543 || !validate_arg (src, POINTER_TYPE)
12544 || !validate_arg (size, INTEGER_TYPE))
12545 return NULL_TREE;
12547 p = c_getstr (src);
12548 /* If the SRC parameter is "", return DEST. */
12549 if (p && *p == '\0')
12550 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12552 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12553 return NULL_TREE;
12555 /* If __builtin_strcat_chk is used, assume strcat is available. */
12556 fn = built_in_decls[BUILT_IN_STRCAT];
12557 if (!fn)
12558 return NULL_TREE;
12560 return build_call_expr (fn, 2, dest, src);
12563 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12564 LEN, and SIZE. */
12566 static tree
12567 fold_builtin_strncat_chk (tree fndecl,
12568 tree dest, tree src, tree len, tree size)
12570 tree fn;
12571 const char *p;
12573 if (!validate_arg (dest, POINTER_TYPE)
12574 || !validate_arg (src, POINTER_TYPE)
12575 || !validate_arg (len, INTEGER_TYPE)
12576 || !validate_arg (size, INTEGER_TYPE))
12577 return NULL_TREE;
12579 p = c_getstr (src);
12580 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12581 if (p && *p == '\0')
12582 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12583 else if (integer_zerop (len))
12584 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12586 if (! host_integerp (size, 1))
12587 return NULL_TREE;
12589 if (! integer_all_onesp (size))
12591 tree src_len = c_strlen (src, 1);
12592 if (src_len
12593 && host_integerp (src_len, 1)
12594 && host_integerp (len, 1)
12595 && ! tree_int_cst_lt (len, src_len))
12597 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12598 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12599 if (!fn)
12600 return NULL_TREE;
12602 return build_call_expr (fn, 3, dest, src, size);
12604 return NULL_TREE;
12607 /* If __builtin_strncat_chk is used, assume strncat is available. */
12608 fn = built_in_decls[BUILT_IN_STRNCAT];
12609 if (!fn)
12610 return NULL_TREE;
12612 return build_call_expr (fn, 3, dest, src, len);
12615 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12616 a normal call should be emitted rather than expanding the function
12617 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12619 static tree
12620 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12622 tree dest, size, len, fn, fmt, flag;
12623 const char *fmt_str;
12624 int nargs = call_expr_nargs (exp);
12626 /* Verify the required arguments in the original call. */
12627 if (nargs < 4)
12628 return NULL_TREE;
12629 dest = CALL_EXPR_ARG (exp, 0);
12630 if (!validate_arg (dest, POINTER_TYPE))
12631 return NULL_TREE;
12632 flag = CALL_EXPR_ARG (exp, 1);
12633 if (!validate_arg (flag, INTEGER_TYPE))
12634 return NULL_TREE;
12635 size = CALL_EXPR_ARG (exp, 2);
12636 if (!validate_arg (size, INTEGER_TYPE))
12637 return NULL_TREE;
12638 fmt = CALL_EXPR_ARG (exp, 3);
12639 if (!validate_arg (fmt, POINTER_TYPE))
12640 return NULL_TREE;
12642 if (! host_integerp (size, 1))
12643 return NULL_TREE;
12645 len = NULL_TREE;
12647 if (!init_target_chars ())
12648 return NULL_TREE;
12650 /* Check whether the format is a literal string constant. */
12651 fmt_str = c_getstr (fmt);
12652 if (fmt_str != NULL)
12654 /* If the format doesn't contain % args or %%, we know the size. */
12655 if (strchr (fmt_str, target_percent) == 0)
12657 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12658 len = build_int_cstu (size_type_node, strlen (fmt_str));
12660 /* If the format is "%s" and first ... argument is a string literal,
12661 we know the size too. */
12662 else if (fcode == BUILT_IN_SPRINTF_CHK
12663 && strcmp (fmt_str, target_percent_s) == 0)
12665 tree arg;
12667 if (nargs == 5)
12669 arg = CALL_EXPR_ARG (exp, 4);
12670 if (validate_arg (arg, POINTER_TYPE))
12672 len = c_strlen (arg, 1);
12673 if (! len || ! host_integerp (len, 1))
12674 len = NULL_TREE;
12680 if (! integer_all_onesp (size))
12682 if (! len || ! tree_int_cst_lt (len, size))
12683 return NULL_TREE;
12686 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12687 or if format doesn't contain % chars or is "%s". */
12688 if (! integer_zerop (flag))
12690 if (fmt_str == NULL)
12691 return NULL_TREE;
12692 if (strchr (fmt_str, target_percent) != NULL
12693 && strcmp (fmt_str, target_percent_s))
12694 return NULL_TREE;
12697 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12698 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12699 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12700 if (!fn)
12701 return NULL_TREE;
12703 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12706 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
12707 a normal call should be emitted rather than expanding the function
12708 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12709 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12710 passed as second argument. */
12712 tree
12713 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12714 enum built_in_function fcode)
12716 tree dest, size, len, fn, fmt, flag;
12717 const char *fmt_str;
12719 /* Verify the required arguments in the original call. */
12720 if (call_expr_nargs (exp) < 5)
12721 return NULL_TREE;
12722 dest = CALL_EXPR_ARG (exp, 0);
12723 if (!validate_arg (dest, POINTER_TYPE))
12724 return NULL_TREE;
12725 len = CALL_EXPR_ARG (exp, 1);
12726 if (!validate_arg (len, INTEGER_TYPE))
12727 return NULL_TREE;
12728 flag = CALL_EXPR_ARG (exp, 2);
12729 if (!validate_arg (flag, INTEGER_TYPE))
12730 return NULL_TREE;
12731 size = CALL_EXPR_ARG (exp, 3);
12732 if (!validate_arg (size, INTEGER_TYPE))
12733 return NULL_TREE;
12734 fmt = CALL_EXPR_ARG (exp, 4);
12735 if (!validate_arg (fmt, POINTER_TYPE))
12736 return NULL_TREE;
12738 if (! host_integerp (size, 1))
12739 return NULL_TREE;
12741 if (! integer_all_onesp (size))
12743 if (! host_integerp (len, 1))
12745 /* If LEN is not constant, try MAXLEN too.
12746 For MAXLEN only allow optimizing into non-_ocs function
12747 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12748 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12749 return NULL_TREE;
12751 else
12752 maxlen = len;
12754 if (tree_int_cst_lt (size, maxlen))
12755 return NULL_TREE;
12758 if (!init_target_chars ())
12759 return NULL_TREE;
12761 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12762 or if format doesn't contain % chars or is "%s". */
12763 if (! integer_zerop (flag))
12765 fmt_str = c_getstr (fmt);
12766 if (fmt_str == NULL)
12767 return NULL_TREE;
12768 if (strchr (fmt_str, target_percent) != NULL
12769 && strcmp (fmt_str, target_percent_s))
12770 return NULL_TREE;
12773 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12774 available. */
12775 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12776 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12777 if (!fn)
12778 return NULL_TREE;
12780 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12783 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12784 FMT and ARG are the arguments to the call; we don't fold cases with
12785 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12787 Return NULL_TREE if no simplification was possible, otherwise return the
12788 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12789 code of the function to be simplified. */
12791 static tree
12792 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12793 enum built_in_function fcode)
12795 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12796 const char *fmt_str = NULL;
12798 /* If the return value is used, don't do the transformation. */
12799 if (! ignore)
12800 return NULL_TREE;
12802 /* Verify the required arguments in the original call. */
12803 if (!validate_arg (fmt, POINTER_TYPE))
12804 return NULL_TREE;
12806 /* Check whether the format is a literal string constant. */
12807 fmt_str = c_getstr (fmt);
12808 if (fmt_str == NULL)
12809 return NULL_TREE;
12811 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12813 /* If we're using an unlocked function, assume the other
12814 unlocked functions exist explicitly. */
12815 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12816 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12818 else
12820 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12821 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12824 if (!init_target_chars ())
12825 return NULL_TREE;
12827 if (strcmp (fmt_str, target_percent_s) == 0
12828 || strchr (fmt_str, target_percent) == NULL)
12830 const char *str;
12832 if (strcmp (fmt_str, target_percent_s) == 0)
12834 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12835 return NULL_TREE;
12837 if (!arg || !validate_arg (arg, POINTER_TYPE))
12838 return NULL_TREE;
12840 str = c_getstr (arg);
12841 if (str == NULL)
12842 return NULL_TREE;
12844 else
12846 /* The format specifier doesn't contain any '%' characters. */
12847 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12848 && arg)
12849 return NULL_TREE;
12850 str = fmt_str;
12853 /* If the string was "", printf does nothing. */
12854 if (str[0] == '\0')
12855 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12857 /* If the string has length of 1, call putchar. */
12858 if (str[1] == '\0')
12860 /* Given printf("c"), (where c is any one character,)
12861 convert "c"[0] to an int and pass that to the replacement
12862 function. */
12863 newarg = build_int_cst (NULL_TREE, str[0]);
12864 if (fn_putchar)
12865 call = build_call_expr (fn_putchar, 1, newarg);
12867 else
12869 /* If the string was "string\n", call puts("string"). */
12870 size_t len = strlen (str);
12871 if ((unsigned char)str[len - 1] == target_newline)
12873 /* Create a NUL-terminated string that's one char shorter
12874 than the original, stripping off the trailing '\n'. */
12875 char *newstr = XALLOCAVEC (char, len);
12876 memcpy (newstr, str, len - 1);
12877 newstr[len - 1] = 0;
12879 newarg = build_string_literal (len, newstr);
12880 if (fn_puts)
12881 call = build_call_expr (fn_puts, 1, newarg);
12883 else
12884 /* We'd like to arrange to call fputs(string,stdout) here,
12885 but we need stdout and don't have a way to get it yet. */
12886 return NULL_TREE;
12890 /* The other optimizations can be done only on the non-va_list variants. */
12891 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12892 return NULL_TREE;
12894 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12895 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12897 if (!arg || !validate_arg (arg, POINTER_TYPE))
12898 return NULL_TREE;
12899 if (fn_puts)
12900 call = build_call_expr (fn_puts, 1, arg);
12903 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12904 else if (strcmp (fmt_str, target_percent_c) == 0)
12906 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12907 return NULL_TREE;
12908 if (fn_putchar)
12909 call = build_call_expr (fn_putchar, 1, arg);
12912 if (!call)
12913 return NULL_TREE;
12915 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
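/* Illustrative example (editorial addition, hypothetical user code): with
   the result ignored, the printf fold above rewrites trivial formats into
   putchar and puts.  */
#if 0
#include <stdio.h>

void
chatter (const char *msg, int c)
{
  printf ("hello\n");    /* -> puts ("hello"), trailing newline dropped.   */
  printf ("%s\n", msg);  /* -> puts (msg).                                 */
  printf ("%c", c);      /* -> putchar (c).                                */
  printf ("");           /* No output: the call is removed.                */
}
#endif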
12918 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12919 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12920 more than 3 arguments, and ARG may be null in the 2-argument case.
12922 Return NULL_TREE if no simplification was possible, otherwise return the
12923 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12924 code of the function to be simplified. */
12926 static tree
12927 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12928 enum built_in_function fcode)
12930 tree fn_fputc, fn_fputs, call = NULL_TREE;
12931 const char *fmt_str = NULL;
12933 /* If the return value is used, don't do the transformation. */
12934 if (! ignore)
12935 return NULL_TREE;
12937 /* Verify the required arguments in the original call. */
12938 if (!validate_arg (fp, POINTER_TYPE))
12939 return NULL_TREE;
12940 if (!validate_arg (fmt, POINTER_TYPE))
12941 return NULL_TREE;
12943 /* Check whether the format is a literal string constant. */
12944 fmt_str = c_getstr (fmt);
12945 if (fmt_str == NULL)
12946 return NULL_TREE;
12948 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12950 /* If we're using an unlocked function, assume the other
12951 unlocked functions exist explicitly. */
12952 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12953 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12955 else
12957 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12958 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12961 if (!init_target_chars ())
12962 return NULL_TREE;
12964 /* If the format doesn't contain % args or %%, fold to fputs. */
12965 if (strchr (fmt_str, target_percent) == NULL)
12967 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12968 && arg)
12969 return NULL_TREE;
12971 /* If the format specifier was "", fprintf does nothing. */
12972 if (fmt_str[0] == '\0')
12974 /* If FP has side-effects, just wait until gimplification is
12975 done. */
12976 if (TREE_SIDE_EFFECTS (fp))
12977 return NULL_TREE;
12979 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12982 /* When "string" doesn't contain %, replace all cases of
12983 fprintf (fp, string) with fputs (string, fp). The fputs
12984 builtin will take care of special cases like length == 1. */
12985 if (fn_fputs)
12986 call = build_call_expr (fn_fputs, 2, fmt, fp);
12989 /* The other optimizations can be done only on the non-va_list variants. */
12990 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12991 return NULL_TREE;
12993 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12994 else if (strcmp (fmt_str, target_percent_s) == 0)
12996 if (!arg || !validate_arg (arg, POINTER_TYPE))
12997 return NULL_TREE;
12998 if (fn_fputs)
12999 call = build_call_expr (fn_fputs, 2, arg, fp);
13002 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13003 else if (strcmp (fmt_str, target_percent_c) == 0)
13005 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13006 return NULL_TREE;
13007 if (fn_fputc)
13008 call = build_call_expr (fn_fputc, 2, arg, fp);
13011 if (!call)
13012 return NULL_TREE;
13013 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
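/* For illustration, when the result of the call is ignored this folds e.g.:
     fprintf (fp, "")         ->  0   (only if FP has no side effects)
     fprintf (fp, "hello")    ->  fputs ("hello", fp)
     fprintf (fp, "%s", arg)  ->  fputs (arg, fp)
     fprintf (fp, "%c", arg)  ->  fputc (arg, fp)
   using the *_unlocked counterparts for fprintf_unlocked.  */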
13016 /* Initialize format string characters in the target charset. */
13018 static bool
13019 init_target_chars (void)
13021 static bool init;
13022 if (!init)
13024 target_newline = lang_hooks.to_target_charset ('\n');
13025 target_percent = lang_hooks.to_target_charset ('%');
13026 target_c = lang_hooks.to_target_charset ('c');
13027 target_s = lang_hooks.to_target_charset ('s');
13028 if (target_newline == 0 || target_percent == 0 || target_c == 0
13029 || target_s == 0)
13030 return false;
13032 target_percent_c[0] = target_percent;
13033 target_percent_c[1] = target_c;
13034 target_percent_c[2] = '\0';
13036 target_percent_s[0] = target_percent;
13037 target_percent_s[1] = target_s;
13038 target_percent_s[2] = '\0';
13040 target_percent_s_newline[0] = target_percent;
13041 target_percent_s_newline[1] = target_s;
13042 target_percent_s_newline[2] = target_newline;
13043 target_percent_s_newline[3] = '\0';
13045 init = true;
13047 return true;
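/* Note: on a target whose execution character set is ASCII this simply
   caches '\n', '%', 'c' and 's' together with the strings "%c", "%s" and
   "%s\n"; the printf/fprintf folders above compare literal format strings
   against these target-charset values rather than against host chars.  */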
13050 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13051 and no overflow/underflow occurred. INEXACT is true if M was not
13052 exactly calculated. TYPE is the tree type for the result. This
13053 function assumes that the caller cleared the MPFR flags immediately
13054 before calculating M, so that any flag set on entry was raised by
13055 that calculation. Return NULL_TREE if any checks fail. */
13057 static tree
13058 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13060 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13061 overflow/underflow occurred. If -frounding-math, proceed iff the
13062 result of calling FUNC was exact. */
13063 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13064 && (!flag_rounding_math || !inexact))
13066 REAL_VALUE_TYPE rr;
13068 real_from_mpfr (&rr, m, type, GMP_RNDN);
13069 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value, i.e.
13070 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13071 but the mpfr_t is not, then we underflowed in the
13072 conversion. */
13073 if (real_isfinite (&rr)
13074 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13076 REAL_VALUE_TYPE rmode;
13078 real_convert (&rmode, TYPE_MODE (type), &rr);
13079 /* Proceed iff the specified mode can hold the value. */
13080 if (real_identical (&rmode, &rr))
13081 return build_real (type, rmode);
13084 return NULL_TREE;
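/* Note: this is a round-trip check.  The MPFR result is converted first
   to GCC's internal REAL_VALUE_TYPE and then to TYPE's mode; a REAL_CST
   is built only if neither step loses information.  For example, a value
   that is finite at the working precision but too small for TYPE's mode
   fails the real_identical test and the fold is abandoned.  */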
13087 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13088 FUNC on it and return the resulting value as a tree with type TYPE.
13089 If MIN and/or MAX are not NULL, then the supplied ARG must be
13090 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13091 acceptable values, otherwise they are not. The mpfr precision is
13092 set to the precision of TYPE. We assume that function FUNC returns
13093 zero if the result could be calculated exactly within the requested
13094 precision. */
13096 static tree
13097 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13098 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13099 bool inclusive)
13101 tree result = NULL_TREE;
13103 STRIP_NOPS (arg);
13105 /* To proceed, MPFR must exactly represent the target floating point
13106 format, which only happens when the target base equals two. */
13107 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13108 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13110 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13112 if (real_isfinite (ra)
13113 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13114 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13116 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13117 const int prec = fmt->p;
13118 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13119 int inexact;
13120 mpfr_t m;
13122 mpfr_init2 (m, prec);
13123 mpfr_from_real (m, ra, GMP_RNDN);
13124 mpfr_clear_flags ();
13125 inexact = func (m, m, rnd);
13126 result = do_mpfr_ckconv (m, type, inexact);
13127 mpfr_clear (m);
13131 return result;
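/* For illustration (not a definitive list of callers), a one-argument
   folder typically invokes this roughly as
     do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false);
   passing MIN/MAX only for functions with a restricted domain, e.g.
   [-1, 1] inclusive for asin/acos.  */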
13134 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13135 FUNC on it and return the resulting value as a tree with type TYPE.
13136 The mpfr precision is set to the precision of TYPE. We assume that
13137 function FUNC returns zero if the result could be calculated
13138 exactly within the requested precision. */
13140 static tree
13141 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13142 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13144 tree result = NULL_TREE;
13146 STRIP_NOPS (arg1);
13147 STRIP_NOPS (arg2);
13149 /* To proceed, MPFR must exactly represent the target floating point
13150 format, which only happens when the target base equals two. */
13151 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13152 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13153 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13155 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13156 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13158 if (real_isfinite (ra1) && real_isfinite (ra2))
13160 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13161 const int prec = fmt->p;
13162 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13163 int inexact;
13164 mpfr_t m1, m2;
13166 mpfr_inits2 (prec, m1, m2, NULL);
13167 mpfr_from_real (m1, ra1, GMP_RNDN);
13168 mpfr_from_real (m2, ra2, GMP_RNDN);
13169 mpfr_clear_flags ();
13170 inexact = func (m1, m1, m2, rnd);
13171 result = do_mpfr_ckconv (m1, type, inexact);
13172 mpfr_clears (m1, m2, NULL);
13176 return result;
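/* For illustration, two-argument folders (e.g. for pow, atan2 or hypot)
   call this roughly as
     do_mpfr_arg2 (arg0, arg1, type, mpfr_pow);  */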
13179 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13180 FUNC on it and return the resulting value as a tree with type TYPE.
13181 The mpfr precision is set to the precision of TYPE. We assume that
13182 function FUNC returns zero if the result could be calculated
13183 exactly within the requested precision. */
13185 static tree
13186 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13187 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13189 tree result = NULL_TREE;
13191 STRIP_NOPS (arg1);
13192 STRIP_NOPS (arg2);
13193 STRIP_NOPS (arg3);
13195 /* To proceed, MPFR must exactly represent the target floating point
13196 format, which only happens when the target base equals two. */
13197 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13198 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13199 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13200 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13202 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13203 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13204 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13206 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13208 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13209 const int prec = fmt->p;
13210 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13211 int inexact;
13212 mpfr_t m1, m2, m3;
13214 mpfr_inits2 (prec, m1, m2, m3, NULL);
13215 mpfr_from_real (m1, ra1, GMP_RNDN);
13216 mpfr_from_real (m2, ra2, GMP_RNDN);
13217 mpfr_from_real (m3, ra3, GMP_RNDN);
13218 mpfr_clear_flags ();
13219 inexact = func (m1, m1, m2, m3, rnd);
13220 result = do_mpfr_ckconv (m1, type, inexact);
13221 mpfr_clears (m1, m2, m3, NULL);
13225 return result;
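/* For illustration, the fma folder calls this roughly as
     do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);  */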
13228 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13229 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13230 If ARG_SINP and ARG_COSP are NULL then the result is returned
13231 as a complex value.
13232 The type is taken from the type of ARG and is used for setting the
13233 precision of the calculation and results. */
13235 static tree
13236 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13238 tree const type = TREE_TYPE (arg);
13239 tree result = NULL_TREE;
13241 STRIP_NOPS (arg);
13243 /* To proceed, MPFR must exactly represent the target floating point
13244 format, which only happens when the target base equals two. */
13245 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13246 && TREE_CODE (arg) == REAL_CST
13247 && !TREE_OVERFLOW (arg))
13249 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13251 if (real_isfinite (ra))
13253 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13254 const int prec = fmt->p;
13255 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13256 tree result_s, result_c;
13257 int inexact;
13258 mpfr_t m, ms, mc;
13260 mpfr_inits2 (prec, m, ms, mc, NULL);
13261 mpfr_from_real (m, ra, GMP_RNDN);
13262 mpfr_clear_flags ();
13263 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13264 result_s = do_mpfr_ckconv (ms, type, inexact);
13265 result_c = do_mpfr_ckconv (mc, type, inexact);
13266 mpfr_clears (m, ms, mc, NULL);
13267 if (result_s && result_c)
13269 /* If we are to return the result as a complex value, do so. */
13270 if (!arg_sinp && !arg_cosp)
13271 return build_complex (build_complex_type (type),
13272 result_c, result_s);
13274 /* Dereference the sin/cos pointer arguments. */
13275 arg_sinp = build_fold_indirect_ref (arg_sinp);
13276 arg_cosp = build_fold_indirect_ref (arg_cosp);
13277 /* Proceed iff valid pointer types were passed in. */
13278 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13279 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13281 /* Set the values. */
13282 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13283 result_s);
13284 TREE_SIDE_EFFECTS (result_s) = 1;
13285 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13286 result_c);
13287 TREE_SIDE_EFFECTS (result_c) = 1;
13288 /* Combine the assignments into a compound expr. */
13289 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13290 result_s, result_c));
13295 return result;
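/* For illustration: for a constant X, sincos (X, &s, &c) folds into the
   compound expression "*&s = sin(X), *&c = cos(X)" built above, while a
   caller that passes NULL pointers (e.g. the cexpi folder) gets back the
   complex constant cos(X) + sin(X)*i.  */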
13298 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13299 two-argument mpfr order N Bessel function FUNC on them and return
13300 the resulting value as a tree with type TYPE. The mpfr precision
13301 is set to the precision of TYPE. We assume that function FUNC
13302 returns zero if the result could be calculated exactly within the
13303 requested precision. */
13304 static tree
13305 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13306 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13307 const REAL_VALUE_TYPE *min, bool inclusive)
13309 tree result = NULL_TREE;
13311 STRIP_NOPS (arg1);
13312 STRIP_NOPS (arg2);
13314 /* To proceed, MPFR must exactly represent the target floating point
13315 format, which only happens when the target base equals two. */
13316 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13317 && host_integerp (arg1, 0)
13318 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13320 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13321 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13323 if (n == (long)n
13324 && real_isfinite (ra)
13325 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13327 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13328 const int prec = fmt->p;
13329 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13330 int inexact;
13331 mpfr_t m;
13333 mpfr_init2 (m, prec);
13334 mpfr_from_real (m, ra, GMP_RNDN);
13335 mpfr_clear_flags ();
13336 inexact = func (m, n, m, rnd);
13337 result = do_mpfr_ckconv (m, type, inexact);
13338 mpfr_clear (m);
13342 return result;
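/* For illustration, the jn/yn folders pass mpfr_jn or mpfr_yn here, the
   order being the integer constant ARG1; MIN is typically supplied for
   yn, whose real argument must be positive.  */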
13345 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13346 the pointer *(ARG_QUO) and return the result. The type is taken
13347 from the type of ARG0 and is used for setting the precision of the
13348 calculation and results. */
13350 static tree
13351 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13353 tree const type = TREE_TYPE (arg0);
13354 tree result = NULL_TREE;
13356 STRIP_NOPS (arg0);
13357 STRIP_NOPS (arg1);
13359 /* To proceed, MPFR must exactly represent the target floating point
13360 format, which only happens when the target base equals two. */
13361 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13362 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13363 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13365 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13366 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13368 if (real_isfinite (ra0) && real_isfinite (ra1))
13370 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13371 const int prec = fmt->p;
13372 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13373 tree result_rem;
13374 long integer_quo;
13375 mpfr_t m0, m1;
13377 mpfr_inits2 (prec, m0, m1, NULL);
13378 mpfr_from_real (m0, ra0, GMP_RNDN);
13379 mpfr_from_real (m1, ra1, GMP_RNDN);
13380 mpfr_clear_flags ();
13381 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13382 /* Remquo is independent of the rounding mode, so pass
13383 inexact=0 to do_mpfr_ckconv(). */
13384 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13385 mpfr_clears (m0, m1, NULL);
13386 if (result_rem)
13388 /* MPFR calculates quo in the host's long, so it may
13389 return more bits in quo than the target int can hold
13390 if sizeof(host long) > sizeof(target int). This can
13391 happen even for native compilers in LP64 mode. In
13392 these cases, reduce the quo value modulo the largest
13393 number that the target int can hold while leaving one
13394 bit for the sign. */
13395 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13396 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13398 /* Dereference the quo pointer argument. */
13399 arg_quo = build_fold_indirect_ref (arg_quo);
13400 /* Proceed iff a valid pointer type was passed in. */
13401 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13403 /* Set the value. */
13404 tree result_quo = fold_build2 (MODIFY_EXPR,
13405 TREE_TYPE (arg_quo), arg_quo,
13406 build_int_cst (NULL, integer_quo));
13407 TREE_SIDE_EFFECTS (result_quo) = 1;
13408 /* Combine the quo assignment with the rem. */
13409 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13410 result_quo, result_rem));
13415 return result;
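/* For illustration: with constant arguments, remquo (x, y, &quo) folds
   into the compound expression "*&quo = QUO, REM", where REM is the
   REAL_CST computed by mpfr_remquo and QUO the (possibly reduced)
   integer quotient.  */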
13418 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13419 resulting value as a tree with type TYPE. The mpfr precision is
13420 set to the precision of TYPE. We assume that this mpfr function
13421 returns zero if the result could be calculated exactly within the
13422 requested precision. In addition, the integer pointer represented
13423 by ARG_SG will be dereferenced and set to the appropriate signgam
13424 (-1,1) value. */
13426 static tree
13427 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13429 tree result = NULL_TREE;
13431 STRIP_NOPS (arg);
13433 /* To proceed, MPFR must exactly represent the target floating point
13434 format, which only happens when the target base equals two. Also
13435 verify ARG is a constant and that ARG_SG is an int pointer. */
13436 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13437 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13438 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13439 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13441 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13443 /* In addition to NaN and Inf, the argument cannot be zero or a
13444 negative integer. */
13445 if (real_isfinite (ra)
13446 && ra->cl != rvc_zero
13447 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13449 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13450 const int prec = fmt->p;
13451 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13452 int inexact, sg;
13453 mpfr_t m;
13454 tree result_lg;
13456 mpfr_init2 (m, prec);
13457 mpfr_from_real (m, ra, GMP_RNDN);
13458 mpfr_clear_flags ();
13459 inexact = mpfr_lgamma (m, &sg, m, rnd);
13460 result_lg = do_mpfr_ckconv (m, type, inexact);
13461 mpfr_clear (m);
13462 if (result_lg)
13464 tree result_sg;
13466 /* Dereference the arg_sg pointer argument. */
13467 arg_sg = build_fold_indirect_ref (arg_sg);
13468 /* Assign the signgam value into *arg_sg. */
13469 result_sg = fold_build2 (MODIFY_EXPR,
13470 TREE_TYPE (arg_sg), arg_sg,
13471 build_int_cst (NULL, sg));
13472 TREE_SIDE_EFFECTS (result_sg) = 1;
13473 /* Combine the signgam assignment with the lgamma result. */
13474 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13475 result_sg, result_lg));
13480 return result;
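/* For illustration: lgamma_r (x, &sg) with constant X folds into
   "*&sg = SG, LG", where LG is the REAL_CST log-gamma value and SG the
   +1/-1 sign reported by mpfr_lgamma.  */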
13483 /* FIXME tuples.
13484 The functions below provide an alternate interface for folding
13485 builtin function calls presented as GIMPLE_CALL statements rather
13486 than as CALL_EXPRs. The folded result is still expressed as a
13487 tree. There is too much code duplication in the handling of
13488 varargs functions, and a more intrusive re-factoring would permit
13489 better sharing of code between the tree and statement-based
13490 versions of these functions. */
13492 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13493 along with N new arguments specified as the "..." parameters. SKIP
13494 is the number of arguments in STMT to be omitted. This function is used
13495 to do varargs-to-varargs transformations. */
13497 static tree
13498 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13500 int oldnargs = gimple_call_num_args (stmt);
13501 int nargs = oldnargs - skip + n;
13502 tree fntype = TREE_TYPE (fndecl);
13503 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13504 tree *buffer;
13505 int i, j;
13506 va_list ap;
13508 buffer = XALLOCAVEC (tree, nargs);
13509 va_start (ap, n);
13510 for (i = 0; i < n; i++)
13511 buffer[i] = va_arg (ap, tree);
13512 va_end (ap);
13513 for (j = skip; j < oldnargs; j++, i++)
13514 buffer[i] = gimple_call_arg (stmt, j);
13516 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
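/* For example, the sprintf_chk folder below rewrites
     __sprintf_chk (dest, flag, size, fmt, ...)
   into sprintf (dest, fmt, ...) via
     gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
   i.e. drop the first four arguments, prepend two new ones and keep any
   remaining varargs.  */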
13519 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13520 a normal call should be emitted rather than expanding the function
13521 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13523 static tree
13524 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13526 tree dest, size, len, fn, fmt, flag;
13527 const char *fmt_str;
13528 int nargs = gimple_call_num_args (stmt);
13530 /* Verify the required arguments in the original call. */
13531 if (nargs < 4)
13532 return NULL_TREE;
13533 dest = gimple_call_arg (stmt, 0);
13534 if (!validate_arg (dest, POINTER_TYPE))
13535 return NULL_TREE;
13536 flag = gimple_call_arg (stmt, 1);
13537 if (!validate_arg (flag, INTEGER_TYPE))
13538 return NULL_TREE;
13539 size = gimple_call_arg (stmt, 2);
13540 if (!validate_arg (size, INTEGER_TYPE))
13541 return NULL_TREE;
13542 fmt = gimple_call_arg (stmt, 3);
13543 if (!validate_arg (fmt, POINTER_TYPE))
13544 return NULL_TREE;
13546 if (! host_integerp (size, 1))
13547 return NULL_TREE;
13549 len = NULL_TREE;
13551 if (!init_target_chars ())
13552 return NULL_TREE;
13554 /* Check whether the format is a literal string constant. */
13555 fmt_str = c_getstr (fmt);
13556 if (fmt_str != NULL)
13558 /* If the format doesn't contain % args or %%, we know the size. */
13559 if (strchr (fmt_str, target_percent) == 0)
13561 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13562 len = build_int_cstu (size_type_node, strlen (fmt_str));
13564 /* If the format is "%s" and the first "..." argument is a string
13565 literal, we know the size too. */
13566 else if (fcode == BUILT_IN_SPRINTF_CHK
13567 && strcmp (fmt_str, target_percent_s) == 0)
13569 tree arg;
13571 if (nargs == 5)
13573 arg = gimple_call_arg (stmt, 4);
13574 if (validate_arg (arg, POINTER_TYPE))
13576 len = c_strlen (arg, 1);
13577 if (! len || ! host_integerp (len, 1))
13578 len = NULL_TREE;
13584 if (! integer_all_onesp (size))
13586 if (! len || ! tree_int_cst_lt (len, size))
13587 return NULL_TREE;
13590 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13591 or if format doesn't contain % chars or is "%s". */
13592 if (! integer_zerop (flag))
13594 if (fmt_str == NULL)
13595 return NULL_TREE;
13596 if (strchr (fmt_str, target_percent) != NULL
13597 && strcmp (fmt_str, target_percent_s))
13598 return NULL_TREE;
13601 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13602 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13603 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13604 if (!fn)
13605 return NULL_TREE;
13607 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
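/* For illustration (values are hypothetical): with a known object size,
     __sprintf_chk (buf, 0, 64, "hello world")
   folds to sprintf (buf, "hello world") because the output length (11)
   is provably smaller than the destination size; when the length cannot
   be proven smaller, the checking call is left alone.  */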
13610 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13611 a normal call should be emitted rather than expanding the function
13612 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13613 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13614 passed as second argument. */
13616 tree
13617 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13618 enum built_in_function fcode)
13620 tree dest, size, len, fn, fmt, flag;
13621 const char *fmt_str;
13623 /* Verify the required arguments in the original call. */
13624 if (gimple_call_num_args (stmt) < 5)
13625 return NULL_TREE;
13626 dest = gimple_call_arg (stmt, 0);
13627 if (!validate_arg (dest, POINTER_TYPE))
13628 return NULL_TREE;
13629 len = gimple_call_arg (stmt, 1);
13630 if (!validate_arg (len, INTEGER_TYPE))
13631 return NULL_TREE;
13632 flag = gimple_call_arg (stmt, 2);
13633 if (!validate_arg (flag, INTEGER_TYPE))
13634 return NULL_TREE;
13635 size = gimple_call_arg (stmt, 3);
13636 if (!validate_arg (size, INTEGER_TYPE))
13637 return NULL_TREE;
13638 fmt = gimple_call_arg (stmt, 4);
13639 if (!validate_arg (fmt, POINTER_TYPE))
13640 return NULL_TREE;
13642 if (! host_integerp (size, 1))
13643 return NULL_TREE;
13645 if (! integer_all_onesp (size))
13647 if (! host_integerp (len, 1))
13649 /* If LEN is not constant, try MAXLEN too.
13650 For MAXLEN only allow optimizing into non-_ocs function
13651 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13652 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13653 return NULL_TREE;
13655 else
13656 maxlen = len;
13658 if (tree_int_cst_lt (size, maxlen))
13659 return NULL_TREE;
13662 if (!init_target_chars ())
13663 return NULL_TREE;
13665 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13666 or if format doesn't contain % chars or is "%s". */
13667 if (! integer_zerop (flag))
13669 fmt_str = c_getstr (fmt);
13670 if (fmt_str == NULL)
13671 return NULL_TREE;
13672 if (strchr (fmt_str, target_percent) != NULL
13673 && strcmp (fmt_str, target_percent_s))
13674 return NULL_TREE;
13677 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13678 available. */
13679 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13680 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13681 if (!fn)
13682 return NULL_TREE;
13684 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
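/* Likewise, for illustration: __snprintf_chk (buf, 16, 0, 64, fmt, ...)
   folds to snprintf (buf, 16, fmt, ...) because the bound (16) does not
   exceed the known object size (64); the values here are hypothetical.  */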
13687 /* Builtins with folding operations that operate on "..." arguments
13688 need special handling; the arguments must be stored in a convenient
13689 data structure before attempting any folding. Fortunately there are
13690 only a few builtins that fall into this category. FNDECL is the
13691 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13692 result of the function call is ignored. */
13694 static tree
13695 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13697 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13698 tree ret = NULL_TREE;
13700 switch (fcode)
13702 case BUILT_IN_SPRINTF_CHK:
13703 case BUILT_IN_VSPRINTF_CHK:
13704 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13705 break;
13707 case BUILT_IN_SNPRINTF_CHK:
13708 case BUILT_IN_VSNPRINTF_CHK:
13709 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13711 default:
13712 break;
13714 if (ret)
13716 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13717 TREE_NO_WARNING (ret) = 1;
13718 return ret;
13720 return NULL_TREE;
13723 /* A wrapper function for builtin folding that prevents warnings for
13724 "statement without effect" and the like, caused by removing the
13725 call node earlier than the warning is generated. */
13727 tree
13728 fold_call_stmt (gimple stmt, bool ignore)
13730 tree ret = NULL_TREE;
13731 tree fndecl = gimple_call_fndecl (stmt);
13732 if (fndecl
13733 && TREE_CODE (fndecl) == FUNCTION_DECL
13734 && DECL_BUILT_IN (fndecl)
13735 && !gimple_call_va_arg_pack_p (stmt))
13737 int nargs = gimple_call_num_args (stmt);
13739 if (avoid_folding_inline_builtin (fndecl))
13740 return NULL_TREE;
13741 /* FIXME: Don't use a list in this interface. */
13742 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13744 tree arglist = NULL_TREE;
13745 int i;
13746 for (i = nargs - 1; i >= 0; i--)
13747 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13748 return targetm.fold_builtin (fndecl, arglist, ignore);
13750 else
13752 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13754 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13755 int i;
13756 for (i = 0; i < nargs; i++)
13757 args[i] = gimple_call_arg (stmt, i);
13758 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13760 if (!ret)
13761 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13762 if (ret)
13764 /* Propagate location information from original call to
13765 expansion of builtin. Otherwise things like
13766 maybe_emit_chk_warning, which operate on the expansion
13767 of a builtin, will use the wrong location information. */
13768 if (gimple_has_location (stmt))
13770 tree realret = ret;
13771 if (TREE_CODE (ret) == NOP_EXPR)
13772 realret = TREE_OPERAND (ret, 0);
13773 if (CAN_HAVE_LOCATION_P (realret)
13774 && !EXPR_HAS_LOCATION (realret))
13775 SET_EXPR_LOCATION (realret, gimple_location (stmt));
13776 return realret;
13778 return ret;
13782 return NULL_TREE;