gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "tree-gimple.h"
32 #include "flags.h"
33 #include "regs.h"
34 #include "hard-reg-set.h"
35 #include "except.h"
36 #include "function.h"
37 #include "insn-config.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "predict.h"
46 #include "tm_p.h"
47 #include "target.h"
48 #include "langhooks.h"
49 #include "basic-block.h"
50 #include "tree-mudflap.h"
51 #include "tree-flow.h"
52 #include "value-prof.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
56 #endif
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
64 {
65 #include "builtins.def"
66 };
67 #undef DEF_BUILTIN
69 /* Set up an array of _DECL trees; make sure each element is
70    initialized to NULL_TREE.  */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73    An entry may be NULL_TREE when implicit use of the builtin is invalid (for
74    instance, when the runtime is not required to implement the call in all cases).  */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
85 #endif
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
111 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
112 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
113 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
119 enum machine_mode, int);
120 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
121 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
122 enum machine_mode, int);
123 static rtx expand_builtin_bcopy (tree, int);
124 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
125 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
127 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memcmp (tree, tree, tree);
176 static tree fold_builtin_strcmp (tree, tree);
177 static tree fold_builtin_strncmp (tree, tree, tree);
178 static tree fold_builtin_signbit (tree, tree);
179 static tree fold_builtin_copysign (tree, tree, tree, tree);
180 static tree fold_builtin_isascii (tree);
181 static tree fold_builtin_toascii (tree);
182 static tree fold_builtin_isdigit (tree);
183 static tree fold_builtin_fabs (tree, tree);
184 static tree fold_builtin_abs (tree, tree);
185 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
186 enum tree_code);
187 static tree fold_builtin_n (tree, tree *, int, bool);
188 static tree fold_builtin_0 (tree, bool);
189 static tree fold_builtin_1 (tree, tree, bool);
190 static tree fold_builtin_2 (tree, tree, tree, bool);
191 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
192 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
193 static tree fold_builtin_varargs (tree, tree, bool);
195 static tree fold_builtin_strpbrk (tree, tree, tree);
196 static tree fold_builtin_strstr (tree, tree, tree);
197 static tree fold_builtin_strrchr (tree, tree, tree);
198 static tree fold_builtin_strcat (tree, tree);
199 static tree fold_builtin_strncat (tree, tree, tree);
200 static tree fold_builtin_strspn (tree, tree);
201 static tree fold_builtin_strcspn (tree, tree);
202 static tree fold_builtin_sprintf (tree, tree, tree, int);
204 static rtx expand_builtin_object_size (tree);
205 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
206 enum built_in_function);
207 static void maybe_emit_chk_warning (tree, enum built_in_function);
208 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
209 static tree fold_builtin_object_size (tree, tree);
210 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
211 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
212 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
213 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
214 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
215 enum built_in_function);
216 static bool init_target_chars (void);
218 static unsigned HOST_WIDE_INT target_newline;
219 static unsigned HOST_WIDE_INT target_percent;
220 static unsigned HOST_WIDE_INT target_c;
221 static unsigned HOST_WIDE_INT target_s;
222 static char target_percent_c[3];
223 static char target_percent_s[3];
224 static char target_percent_s_newline[4];
225 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
226 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
227 static tree do_mpfr_arg2 (tree, tree, tree,
228 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
229 static tree do_mpfr_arg3 (tree, tree, tree, tree,
230 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
231 static tree do_mpfr_sincos (tree, tree, tree);
233 /* Return true if NODE should be considered for inline expansion regardless
234 of the optimization level. This means whenever a function is invoked with
235 its "internal" name, which normally contains the prefix "__builtin". */
237 static bool called_as_built_in (tree node)
239 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
240 if (strncmp (name, "__builtin_", 10) == 0)
241 return true;
242 if (strncmp (name, "__sync_", 7) == 0)
243 return true;
244 return false;
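/* Illustrative sketch (hypothetical arguments dst, src, n): a call written
   as __builtin_memcpy (dst, src, n) matches the "__builtin_" prefix test
   above and is therefore a candidate for inline expansion regardless of the
   optimization level, whereas a plain memcpy (dst, src, n) call is handled
   only under the usual optimization-dependent rules.  */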
247 /* Return the alignment in bits of EXP, a pointer valued expression.
248 But don't return more than MAX_ALIGN no matter what.
249 The alignment returned is, by default, the alignment of the thing that
250 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
252 Otherwise, look at the expression to see if we can do better, i.e., if the
253 expression is actually pointing at an object whose alignment is tighter. */
256 get_pointer_alignment (tree exp, unsigned int max_align)
258 unsigned int align, inner;
260 /* We rely on TER to compute accurate alignment information. */
261 if (!(optimize && flag_tree_ter))
262 return 0;
264 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
265 return 0;
267 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
268 align = MIN (align, max_align);
270 while (1)
272 switch (TREE_CODE (exp))
274 case NOP_EXPR:
275 case CONVERT_EXPR:
276 case NON_LVALUE_EXPR:
277 exp = TREE_OPERAND (exp, 0);
278 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
279 return align;
281 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
282 align = MIN (inner, max_align);
283 break;
285 case PLUS_EXPR:
286 /* If sum of pointer + int, restrict our maximum alignment to that
287 imposed by the integer. If not, we can't do any better than
288 ALIGN. */
289 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
290 return align;
292 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
293 & (max_align / BITS_PER_UNIT - 1))
294 != 0)
295 max_align >>= 1;
297 exp = TREE_OPERAND (exp, 0);
298 break;
300 case ADDR_EXPR:
301 /* See what we are pointing at and look at its alignment. */
302 exp = TREE_OPERAND (exp, 0);
303 inner = max_align;
304 if (handled_component_p (exp))
306 HOST_WIDE_INT bitsize, bitpos;
307 tree offset;
308 enum machine_mode mode;
309 int unsignedp, volatilep;
311 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
312 &mode, &unsignedp, &volatilep, true);
313 if (bitpos)
314 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
315 if (offset && TREE_CODE (offset) == PLUS_EXPR
316 && host_integerp (TREE_OPERAND (offset, 1), 1))
318 /* Any overflow in calculating offset_bits won't change
319 the alignment. */
320 unsigned offset_bits
321 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
322 * BITS_PER_UNIT);
324 if (offset_bits)
325 inner = MIN (inner, (offset_bits & -offset_bits));
326 offset = TREE_OPERAND (offset, 0);
328 if (offset && TREE_CODE (offset) == MULT_EXPR
329 && host_integerp (TREE_OPERAND (offset, 1), 1))
331 /* Any overflow in calculating offset_factor won't change
332 the alignment. */
333 unsigned offset_factor
334 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
335 * BITS_PER_UNIT);
337 if (offset_factor)
338 inner = MIN (inner, (offset_factor & -offset_factor));
340 else if (offset)
341 inner = MIN (inner, BITS_PER_UNIT);
343 if (TREE_CODE (exp) == FUNCTION_DECL)
344 align = FUNCTION_BOUNDARY;
345 else if (DECL_P (exp))
346 align = MIN (inner, DECL_ALIGN (exp));
347 #ifdef CONSTANT_ALIGNMENT
348 else if (CONSTANT_CLASS_P (exp))
349 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
350 #endif
351 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
352 || TREE_CODE (exp) == INDIRECT_REF)
353 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
354 else
355 align = MIN (align, inner);
356 return MIN (align, max_align);
358 default:
359 return align;
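/* Illustrative sketch: for a hypothetical `int x' with 32-bit alignment,
   passing the expression (char *) &x + 2 through the PLUS_EXPR case above
   shrinks MAX_ALIGN until the constant offset 2 is a multiple of it, so at
   most 16 bits (2 bytes) of alignment is reported for the sum.  */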
364 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
365 way, because it could contain a zero byte in the middle.
366 TREE_STRING_LENGTH is the size of the character array, not the string.
368 ONLY_VALUE should be nonzero if the result is not going to be emitted
369 into the instruction stream and zero if it is going to be expanded.
370 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
371 is returned, otherwise NULL, since
372 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
373 evaluate the side-effects.
375 The value returned is of type `ssizetype'.
377 Unfortunately, string_constant can't access the values of const char
378 arrays with initializers, so neither can we do so here. */
380 tree
381 c_strlen (tree src, int only_value)
383 tree offset_node;
384 HOST_WIDE_INT offset;
385 int max;
386 const char *ptr;
388 STRIP_NOPS (src);
389 if (TREE_CODE (src) == COND_EXPR
390 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
392 tree len1, len2;
394 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
395 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
396 if (tree_int_cst_equal (len1, len2))
397 return len1;
400 if (TREE_CODE (src) == COMPOUND_EXPR
401 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
402 return c_strlen (TREE_OPERAND (src, 1), only_value);
404 src = string_constant (src, &offset_node);
405 if (src == 0)
406 return NULL_TREE;
408 max = TREE_STRING_LENGTH (src) - 1;
409 ptr = TREE_STRING_POINTER (src);
411 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
413 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
414 compute the offset to the following null if we don't know where to
415 start searching for it. */
416 int i;
418 for (i = 0; i < max; i++)
419 if (ptr[i] == 0)
420 return NULL_TREE;
422 /* We don't know the starting offset, but we do know that the string
423 has no internal zero bytes. We can assume that the offset falls
424 within the bounds of the string; otherwise, the programmer deserves
425 what he gets. Subtract the offset from the length of the string,
426 and return that. This would perhaps not be valid if we were dealing
427 with named arrays in addition to literal string constants. */
429 return size_diffop (size_int (max), offset_node);
432 /* We have a known offset into the string. Start searching there for
433 a null character if we can represent it as a single HOST_WIDE_INT. */
434 if (offset_node == 0)
435 offset = 0;
436 else if (! host_integerp (offset_node, 0))
437 offset = -1;
438 else
439 offset = tree_low_cst (offset_node, 0);
441 /* If the offset is known to be out of bounds, warn, and call strlen at
442 runtime. */
443 if (offset < 0 || offset > max)
445 warning (0, "offset outside bounds of constant string");
446 return NULL_TREE;
449 /* Use strlen to search for the first zero byte. Since any strings
450 constructed with build_string will have nulls appended, we win even
451 if we get handed something like (char[4])"abcd".
453 Since OFFSET is our starting index into the string, no further
454 calculation is needed. */
455 return ssize_int (strlen (ptr + offset));
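/* Illustrative sketch: for the literal "hello" the host strlen above finds
   the terminating NUL and ssize_int (5) is returned; for "foo\0bar" added
   to a hypothetical non-constant offset, NULL_TREE is returned instead,
   because the distance to the first NUL then depends on the unknown
   starting offset.  */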
458 /* Return a char pointer for a C string if it is a string constant
459 or sum of string constant and integer constant. */
461 static const char *
462 c_getstr (tree src)
464 tree offset_node;
466 src = string_constant (src, &offset_node);
467 if (src == 0)
468 return 0;
470 if (offset_node == 0)
471 return TREE_STRING_POINTER (src);
472 else if (!host_integerp (offset_node, 1)
473 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
474 return 0;
476 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
479 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
480 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
482 static rtx
483 c_readstr (const char *str, enum machine_mode mode)
485 HOST_WIDE_INT c[2];
486 HOST_WIDE_INT ch;
487 unsigned int i, j;
489 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
491 c[0] = 0;
492 c[1] = 0;
493 ch = 1;
494 for (i = 0; i < GET_MODE_SIZE (mode); i++)
496 j = i;
497 if (WORDS_BIG_ENDIAN)
498 j = GET_MODE_SIZE (mode) - i - 1;
499 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
500 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
501 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
502 j *= BITS_PER_UNIT;
503 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
505 if (ch)
506 ch = (unsigned char) str[i];
507 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
509 return immed_double_const (c[0], c[1], mode);
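/* Illustrative sketch: on a little-endian target with 8-bit units, reading
   the string constant "abcd" in a 32-bit integer mode yields the constant
   0x64636261, i.e. 'a' lands in the least significant byte.  */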
512 /* Cast a target constant CST to target CHAR and if that value fits into
513 host char type, return zero and put that value into variable pointed to by
514 P. */
516 static int
517 target_char_cast (tree cst, char *p)
519 unsigned HOST_WIDE_INT val, hostval;
521 if (!host_integerp (cst, 1)
522 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
523 return 1;
525 val = tree_low_cst (cst, 1);
526 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
527 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
529 hostval = val;
530 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
531 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
533 if (val != hostval)
534 return 1;
536 *p = hostval;
537 return 0;
540 /* Similar to save_expr, but assumes that arbitrary code is not executed
541 in between the multiple evaluations. In particular, we assume that a
542 non-addressable local variable will not be modified. */
544 static tree
545 builtin_save_expr (tree exp)
547 if (TREE_ADDRESSABLE (exp) == 0
548 && (TREE_CODE (exp) == PARM_DECL
549 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
550 return exp;
552 return save_expr (exp);
555 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
556 times to get the address of either a higher stack frame, or a return
557 address located within it (depending on FNDECL_CODE). */
559 static rtx
560 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
562 int i;
564 #ifdef INITIAL_FRAME_ADDRESS_RTX
565 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
566 #else
567 rtx tem;
569 /* For a zero count with __builtin_return_address, we don't care what
570 frame address we return, because target-specific definitions will
571 override us. Therefore frame pointer elimination is OK, and using
572 the soft frame pointer is OK.
574 For a nonzero count, or a zero count with __builtin_frame_address,
575 we require a stable offset from the current frame pointer to the
576 previous one, so we must use the hard frame pointer, and
577 we must disable frame pointer elimination. */
578 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
579 tem = frame_pointer_rtx;
580 else
582 tem = hard_frame_pointer_rtx;
584 /* Tell reload not to eliminate the frame pointer. */
585 current_function_accesses_prior_frames = 1;
587 #endif
589 /* Some machines need special handling before we can access
590 arbitrary frames. For example, on the SPARC, we must first flush
591 all register windows to the stack. */
592 #ifdef SETUP_FRAME_ADDRESSES
593 if (count > 0)
594 SETUP_FRAME_ADDRESSES ();
595 #endif
597 /* On the SPARC, the return address is not in the frame, it is in a
598 register. There is no way to access it off of the current frame
599 pointer, but it can be accessed off the previous frame pointer by
600 reading the value from the register window save area. */
601 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
602 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
603 count--;
604 #endif
606 /* Scan back COUNT frames to the specified frame. */
607 for (i = 0; i < count; i++)
609 /* Assume the dynamic chain pointer is in the word that the
610 frame address points to, unless otherwise specified. */
611 #ifdef DYNAMIC_CHAIN_ADDRESS
612 tem = DYNAMIC_CHAIN_ADDRESS (tem);
613 #endif
614 tem = memory_address (Pmode, tem);
615 tem = gen_frame_mem (Pmode, tem);
616 tem = copy_to_reg (tem);
619 /* For __builtin_frame_address, return what we've got. But, on
620 the SPARC for example, we may have to add a bias. */
621 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
622 #ifdef FRAME_ADDR_RTX
623 return FRAME_ADDR_RTX (tem);
624 #else
625 return tem;
626 #endif
628 /* For __builtin_return_address, get the return address from that frame. */
629 #ifdef RETURN_ADDR_RTX
630 tem = RETURN_ADDR_RTX (count, tem);
631 #else
632 tem = memory_address (Pmode,
633 plus_constant (tem, GET_MODE_SIZE (Pmode)));
634 tem = gen_frame_mem (Pmode, tem);
635 #endif
636 return tem;
639 /* Alias set used for setjmp buffer. */
640 static HOST_WIDE_INT setjmp_alias_set = -1;
642 /* Construct the leading half of a __builtin_setjmp call. Control will
643 return to RECEIVER_LABEL. This is also called directly by the SJLJ
644 exception handling code. */
646 void
647 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
649 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
650 rtx stack_save;
651 rtx mem;
653 if (setjmp_alias_set == -1)
654 setjmp_alias_set = new_alias_set ();
656 buf_addr = convert_memory_address (Pmode, buf_addr);
658 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
660 /* We store the frame pointer and the address of receiver_label in
661 the buffer and use the rest of it for the stack save area, which
662 is machine-dependent. */
664 mem = gen_rtx_MEM (Pmode, buf_addr);
665 set_mem_alias_set (mem, setjmp_alias_set);
666 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
668 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
669 set_mem_alias_set (mem, setjmp_alias_set);
671 emit_move_insn (validize_mem (mem),
672 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
674 stack_save = gen_rtx_MEM (sa_mode,
675 plus_constant (buf_addr,
676 2 * GET_MODE_SIZE (Pmode)));
677 set_mem_alias_set (stack_save, setjmp_alias_set);
678 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
680 /* If there is further processing to do, do it. */
681 #ifdef HAVE_builtin_setjmp_setup
682 if (HAVE_builtin_setjmp_setup)
683 emit_insn (gen_builtin_setjmp_setup (buf_addr));
684 #endif
686   /* Tell optimize_save_area_alloca that extra work will need to
687      be done during alloca.  */
688 current_function_calls_setjmp = 1;
690 /* Set this so all the registers get saved in our frame; we need to be
691 able to copy the saved values for any registers from frames we unwind. */
692 current_function_has_nonlocal_label = 1;
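/* In other words, the buffer laid out above is: word 0 holds the value from
   targetm.builtin_setjmp_frame_value (), word 1 holds the address of
   RECEIVER_LABEL, and the area starting at offset 2 * GET_MODE_SIZE (Pmode)
   holds the SAVE_NONLOCAL stack save data.  */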
695 /* Construct the trailing part of a __builtin_setjmp call. This is
696 also called directly by the SJLJ exception handling code. */
698 void
699 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
701 /* Clobber the FP when we get here, so we have to make sure it's
702 marked as used by this function. */
703 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
705 /* Mark the static chain as clobbered here so life information
706 doesn't get messed up for it. */
707 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
709 /* Now put in the code to restore the frame pointer, and argument
710 pointer, if needed. */
711 #ifdef HAVE_nonlocal_goto
712 if (! HAVE_nonlocal_goto)
713 #endif
715 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
716 /* This might change the hard frame pointer in ways that aren't
717 apparent to early optimization passes, so force a clobber. */
718 emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
721 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
722 if (fixed_regs[ARG_POINTER_REGNUM])
724 #ifdef ELIMINABLE_REGS
725 size_t i;
726 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
728 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
729 if (elim_regs[i].from == ARG_POINTER_REGNUM
730 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
731 break;
733 if (i == ARRAY_SIZE (elim_regs))
734 #endif
736 /* Now restore our arg pointer from the address at which it
737 was saved in our stack frame. */
738 emit_move_insn (virtual_incoming_args_rtx,
739 copy_to_reg (get_arg_pointer_save_area (cfun)));
742 #endif
744 #ifdef HAVE_builtin_setjmp_receiver
745 if (HAVE_builtin_setjmp_receiver)
746 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
747 else
748 #endif
749 #ifdef HAVE_nonlocal_goto_receiver
750 if (HAVE_nonlocal_goto_receiver)
751 emit_insn (gen_nonlocal_goto_receiver ());
752 else
753 #endif
754 { /* Nothing */ }
756 /* @@@ This is a kludge. Not all machine descriptions define a blockage
757 insn, but we must not allow the code we just generated to be reordered
758 by scheduling. Specifically, the update of the frame pointer must
759 happen immediately, not later. So emit an ASM_INPUT to act as blockage
760 insn. */
761 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
764 /* __builtin_longjmp is passed a pointer to an array of five words (not
765 all will be used on all machines). It operates similarly to the C
766 library function of the same name, but is more efficient. Much of
767 the code below is copied from the handling of non-local gotos. */
769 static void
770 expand_builtin_longjmp (rtx buf_addr, rtx value)
772 rtx fp, lab, stack, insn, last;
773 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
775 if (setjmp_alias_set == -1)
776 setjmp_alias_set = new_alias_set ();
778 buf_addr = convert_memory_address (Pmode, buf_addr);
780 buf_addr = force_reg (Pmode, buf_addr);
782 /* We used to store value in static_chain_rtx, but that fails if pointers
783 are smaller than integers. We instead require that the user must pass
784 a second argument of 1, because that is what builtin_setjmp will
785 return. This also makes EH slightly more efficient, since we are no
786 longer copying around a value that we don't care about. */
787 gcc_assert (value == const1_rtx);
789 last = get_last_insn ();
790 #ifdef HAVE_builtin_longjmp
791 if (HAVE_builtin_longjmp)
792 emit_insn (gen_builtin_longjmp (buf_addr));
793 else
794 #endif
796 fp = gen_rtx_MEM (Pmode, buf_addr);
797 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
798 GET_MODE_SIZE (Pmode)));
800 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
801 2 * GET_MODE_SIZE (Pmode)));
802 set_mem_alias_set (fp, setjmp_alias_set);
803 set_mem_alias_set (lab, setjmp_alias_set);
804 set_mem_alias_set (stack, setjmp_alias_set);
806 /* Pick up FP, label, and SP from the block and jump. This code is
807 from expand_goto in stmt.c; see there for detailed comments. */
808 #ifdef HAVE_nonlocal_goto
809 if (HAVE_nonlocal_goto)
810 /* We have to pass a value to the nonlocal_goto pattern that will
811 get copied into the static_chain pointer, but it does not matter
812 what that value is, because builtin_setjmp does not use it. */
813 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
814 else
815 #endif
817 lab = copy_to_reg (lab);
819 emit_insn (gen_rtx_CLOBBER (VOIDmode,
820 gen_rtx_MEM (BLKmode,
821 gen_rtx_SCRATCH (VOIDmode))));
822 emit_insn (gen_rtx_CLOBBER (VOIDmode,
823 gen_rtx_MEM (BLKmode,
824 hard_frame_pointer_rtx)));
826 emit_move_insn (hard_frame_pointer_rtx, fp);
827 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
829 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
830 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
831 emit_indirect_jump (lab);
835 /* Search backwards and mark the jump insn as a non-local goto.
836 Note that this precludes the use of __builtin_longjmp to a
837 __builtin_setjmp target in the same function. However, we've
838 already cautioned the user that these functions are for
839 internal exception handling use only. */
840 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
842 gcc_assert (insn != last);
844 if (JUMP_P (insn))
846 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
847 REG_NOTES (insn));
848 break;
850 else if (CALL_P (insn))
851 break;
855 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
856 and the address of the save area. */
858 static rtx
859 expand_builtin_nonlocal_goto (tree exp)
861 tree t_label, t_save_area;
862 rtx r_label, r_save_area, r_fp, r_sp, insn;
864 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
865 return NULL_RTX;
867 t_label = CALL_EXPR_ARG (exp, 0);
868 t_save_area = CALL_EXPR_ARG (exp, 1);
870 r_label = expand_normal (t_label);
871 r_label = convert_memory_address (Pmode, r_label);
872 r_save_area = expand_normal (t_save_area);
873 r_save_area = convert_memory_address (Pmode, r_save_area);
874 r_fp = gen_rtx_MEM (Pmode, r_save_area);
875 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
876 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
878 current_function_has_nonlocal_goto = 1;
880 #ifdef HAVE_nonlocal_goto
881 /* ??? We no longer need to pass the static chain value, afaik. */
882 if (HAVE_nonlocal_goto)
883 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
884 else
885 #endif
887 r_label = copy_to_reg (r_label);
889 emit_insn (gen_rtx_CLOBBER (VOIDmode,
890 gen_rtx_MEM (BLKmode,
891 gen_rtx_SCRATCH (VOIDmode))));
893 emit_insn (gen_rtx_CLOBBER (VOIDmode,
894 gen_rtx_MEM (BLKmode,
895 hard_frame_pointer_rtx)));
897 /* Restore frame pointer for containing function.
898 This sets the actual hard register used for the frame pointer
899 to the location of the function's incoming static chain info.
900 The non-local goto handler will then adjust it to contain the
901 proper value and reload the argument pointer, if needed. */
902 emit_move_insn (hard_frame_pointer_rtx, r_fp);
903 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
905 /* USE of hard_frame_pointer_rtx added for consistency;
906 not clear if really needed. */
907 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
908 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
909 emit_indirect_jump (r_label);
912 /* Search backwards to the jump insn and mark it as a
913 non-local goto. */
914 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
916 if (JUMP_P (insn))
918 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
919 const0_rtx, REG_NOTES (insn));
920 break;
922 else if (CALL_P (insn))
923 break;
926 return const0_rtx;
929 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
930 (not all will be used on all machines) that was passed to __builtin_setjmp.
931 It updates the stack pointer in that block to correspond to the current
932 stack pointer. */
934 static void
935 expand_builtin_update_setjmp_buf (rtx buf_addr)
937 enum machine_mode sa_mode = Pmode;
938 rtx stack_save;
941 #ifdef HAVE_save_stack_nonlocal
942 if (HAVE_save_stack_nonlocal)
943 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
944 #endif
945 #ifdef STACK_SAVEAREA_MODE
946 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
947 #endif
949 stack_save
950 = gen_rtx_MEM (sa_mode,
951 memory_address
952 (sa_mode,
953 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
955 #ifdef HAVE_setjmp
956 if (HAVE_setjmp)
957 emit_insn (gen_setjmp ());
958 #endif
960 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
963 /* Expand a call to __builtin_prefetch. For a target that does not support
964 data prefetch, evaluate the memory address argument in case it has side
965 effects. */
967 static void
968 expand_builtin_prefetch (tree exp)
970 tree arg0, arg1, arg2;
971 int nargs;
972 rtx op0, op1, op2;
974 if (!validate_arglist (exp, POINTER_TYPE, 0))
975 return;
977 arg0 = CALL_EXPR_ARG (exp, 0);
979 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
980 zero (read) and argument 2 (locality) defaults to 3 (high degree of
981 locality). */
982 nargs = call_expr_nargs (exp);
983 if (nargs > 1)
984 arg1 = CALL_EXPR_ARG (exp, 1);
985 else
986 arg1 = integer_zero_node;
987 if (nargs > 2)
988 arg2 = CALL_EXPR_ARG (exp, 2);
989 else
990 arg2 = build_int_cst (NULL_TREE, 3);
992 /* Argument 0 is an address. */
993 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
995 /* Argument 1 (read/write flag) must be a compile-time constant int. */
996 if (TREE_CODE (arg1) != INTEGER_CST)
998 error ("second argument to %<__builtin_prefetch%> must be a constant");
999 arg1 = integer_zero_node;
1001 op1 = expand_normal (arg1);
1002 /* Argument 1 must be either zero or one. */
1003 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1005 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1006 " using zero");
1007 op1 = const0_rtx;
1010 /* Argument 2 (locality) must be a compile-time constant int. */
1011 if (TREE_CODE (arg2) != INTEGER_CST)
1013 error ("third argument to %<__builtin_prefetch%> must be a constant");
1014 arg2 = integer_zero_node;
1016 op2 = expand_normal (arg2);
1017 /* Argument 2 must be 0, 1, 2, or 3. */
1018 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1020 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1021 op2 = const0_rtx;
1024 #ifdef HAVE_prefetch
1025 if (HAVE_prefetch)
1027 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1028 (op0,
1029 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1030 || (GET_MODE (op0) != Pmode))
1032 op0 = convert_memory_address (Pmode, op0);
1033 op0 = force_reg (Pmode, op0);
1035 emit_insn (gen_prefetch (op0, op1, op2));
1037 #endif
1039 /* Don't do anything with direct references to volatile memory, but
1040 generate code to handle other side effects. */
1041 if (!MEM_P (op0) && side_effects_p (op0))
1042 emit_insn (op0);
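/* Illustrative sketch (hypothetical pointer p): __builtin_prefetch (p, 1, 3)
   requests a prefetch for writing with maximal temporal locality; if either
   constant argument is out of range, the checks above warn and fall back to
   zero rather than failing the expansion.  */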
1045 /* Get a MEM rtx for expression EXP which is the address of an operand
1046 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1047 the maximum length of the block of memory that might be accessed or
1048 NULL if unknown. */
1050 static rtx
1051 get_memory_rtx (tree exp, tree len)
1053 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1054 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1056 /* Get an expression we can use to find the attributes to assign to MEM.
1057 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1058 we can. First remove any nops. */
1059 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1060 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1061 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1062 exp = TREE_OPERAND (exp, 0);
1064 if (TREE_CODE (exp) == ADDR_EXPR)
1065 exp = TREE_OPERAND (exp, 0);
1066 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1067 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1068 else
1069 exp = NULL;
1071 /* Honor attributes derived from exp, except for the alias set
1072 (as builtin stringops may alias with anything) and the size
1073 (as stringops may access multiple array elements). */
1074 if (exp)
1076 set_mem_attributes (mem, exp, 0);
1078 /* Allow the string and memory builtins to overflow from one
1079 field into another, see http://gcc.gnu.org/PR23561.
1080 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1081 memory accessed by the string or memory builtin will fit
1082 within the field. */
1083 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1085 tree mem_expr = MEM_EXPR (mem);
1086 HOST_WIDE_INT offset = -1, length = -1;
1087 tree inner = exp;
1089 while (TREE_CODE (inner) == ARRAY_REF
1090 || TREE_CODE (inner) == NOP_EXPR
1091 || TREE_CODE (inner) == CONVERT_EXPR
1092 || TREE_CODE (inner) == NON_LVALUE_EXPR
1093 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1094 || TREE_CODE (inner) == SAVE_EXPR)
1095 inner = TREE_OPERAND (inner, 0);
1097 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1099 if (MEM_OFFSET (mem)
1100 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1101 offset = INTVAL (MEM_OFFSET (mem));
1103 if (offset >= 0 && len && host_integerp (len, 0))
1104 length = tree_low_cst (len, 0);
1106 while (TREE_CODE (inner) == COMPONENT_REF)
1108 tree field = TREE_OPERAND (inner, 1);
1109 gcc_assert (! DECL_BIT_FIELD (field));
1110 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1111 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1113 if (length >= 0
1114 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1115 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1117 HOST_WIDE_INT size
1118 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1119 /* If we can prove the memory starting at XEXP (mem, 0)
1120 and ending at XEXP (mem, 0) + LENGTH will fit into
1121 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1122 if (offset <= size
1123 && length <= size
1124 && offset + length <= size)
1125 break;
1128 if (offset >= 0
1129 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1130 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1131 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1132 / BITS_PER_UNIT;
1133 else
1135 offset = -1;
1136 length = -1;
1139 mem_expr = TREE_OPERAND (mem_expr, 0);
1140 inner = TREE_OPERAND (inner, 0);
1143 if (mem_expr == NULL)
1144 offset = -1;
1145 if (mem_expr != MEM_EXPR (mem))
1147 set_mem_expr (mem, mem_expr);
1148 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1151 set_mem_alias_set (mem, 0);
1152 set_mem_size (mem, NULL_RTX);
1155 return mem;
1158 /* Built-in functions to perform an untyped call and return. */
1160 /* For each register that may be used for calling a function, this
1161 gives a mode used to copy the register's value. VOIDmode indicates
1162 the register is not used for calling a function. If the machine
1163 has register windows, this gives only the outbound registers.
1164 INCOMING_REGNO gives the corresponding inbound register. */
1165 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1167 /* For each register that may be used for returning values, this gives
1168 a mode used to copy the register's value. VOIDmode indicates the
1169 register is not used for returning values. If the machine has
1170 register windows, this gives only the outbound registers.
1171 INCOMING_REGNO gives the corresponding inbound register. */
1172 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1174 /* For each register that may be used for calling a function, this
1175 gives the offset of that register into the block returned by
1176 __builtin_apply_args. 0 indicates that the register is not
1177 used for calling a function. */
1178 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1180 /* Return the size required for the block returned by __builtin_apply_args,
1181 and initialize apply_args_mode. */
1183 static int
1184 apply_args_size (void)
1186 static int size = -1;
1187 int align;
1188 unsigned int regno;
1189 enum machine_mode mode;
1191 /* The values computed by this function never change. */
1192 if (size < 0)
1194 /* The first value is the incoming arg-pointer. */
1195 size = GET_MODE_SIZE (Pmode);
1197 /* The second value is the structure value address unless this is
1198 passed as an "invisible" first argument. */
1199 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1200 size += GET_MODE_SIZE (Pmode);
1202 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1203 if (FUNCTION_ARG_REGNO_P (regno))
1205 mode = reg_raw_mode[regno];
1207 gcc_assert (mode != VOIDmode);
1209 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1210 if (size % align != 0)
1211 size = CEIL (size, align) * align;
1212 apply_args_reg_offset[regno] = size;
1213 size += GET_MODE_SIZE (mode);
1214 apply_args_mode[regno] = mode;
1216 else
1218 apply_args_mode[regno] = VOIDmode;
1219 apply_args_reg_offset[regno] = 0;
1222 return size;
1225 /* Return the size required for the block returned by __builtin_apply,
1226 and initialize apply_result_mode. */
1228 static int
1229 apply_result_size (void)
1231 static int size = -1;
1232 int align, regno;
1233 enum machine_mode mode;
1235 /* The values computed by this function never change. */
1236 if (size < 0)
1238 size = 0;
1240 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1241 if (FUNCTION_VALUE_REGNO_P (regno))
1243 mode = reg_raw_mode[regno];
1245 gcc_assert (mode != VOIDmode);
1247 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1248 if (size % align != 0)
1249 size = CEIL (size, align) * align;
1250 size += GET_MODE_SIZE (mode);
1251 apply_result_mode[regno] = mode;
1253 else
1254 apply_result_mode[regno] = VOIDmode;
1256 /* Allow targets that use untyped_call and untyped_return to override
1257 the size so that machine-specific information can be stored here. */
1258 #ifdef APPLY_RESULT_SIZE
1259 size = APPLY_RESULT_SIZE;
1260 #endif
1262 return size;
1265 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1266 /* Create a vector describing the result block RESULT. If SAVEP is true,
1267 the result block is used to save the values; otherwise it is used to
1268 restore the values. */
1270 static rtx
1271 result_vector (int savep, rtx result)
1273 int regno, size, align, nelts;
1274 enum machine_mode mode;
1275 rtx reg, mem;
1276 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1278 size = nelts = 0;
1279 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1280 if ((mode = apply_result_mode[regno]) != VOIDmode)
1282 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1283 if (size % align != 0)
1284 size = CEIL (size, align) * align;
1285 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1286 mem = adjust_address (result, mode, size);
1287 savevec[nelts++] = (savep
1288 ? gen_rtx_SET (VOIDmode, mem, reg)
1289 : gen_rtx_SET (VOIDmode, reg, mem));
1290 size += GET_MODE_SIZE (mode);
1292 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1294 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1296 /* Save the state required to perform an untyped call with the same
1297 arguments as were passed to the current function. */
1299 static rtx
1300 expand_builtin_apply_args_1 (void)
1302 rtx registers, tem;
1303 int size, align, regno;
1304 enum machine_mode mode;
1305 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1307 /* Create a block where the arg-pointer, structure value address,
1308 and argument registers can be saved. */
1309 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1311 /* Walk past the arg-pointer and structure value address. */
1312 size = GET_MODE_SIZE (Pmode);
1313 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1314 size += GET_MODE_SIZE (Pmode);
1316 /* Save each register used in calling a function to the block. */
1317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1318 if ((mode = apply_args_mode[regno]) != VOIDmode)
1320 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1321 if (size % align != 0)
1322 size = CEIL (size, align) * align;
1324 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1326 emit_move_insn (adjust_address (registers, mode, size), tem);
1327 size += GET_MODE_SIZE (mode);
1330 /* Save the arg pointer to the block. */
1331 tem = copy_to_reg (virtual_incoming_args_rtx);
1332 #ifdef STACK_GROWS_DOWNWARD
1333 /* We need the pointer as the caller actually passed them to us, not
1334 as we might have pretended they were passed. Make sure it's a valid
1335 operand, as emit_move_insn isn't expected to handle a PLUS. */
1336   tem
1337     = force_operand (plus_constant (tem, current_function_pretend_args_size),
1338 NULL_RTX);
1339 #endif
1340 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1342 size = GET_MODE_SIZE (Pmode);
1344 /* Save the structure value address unless this is passed as an
1345 "invisible" first argument. */
1346 if (struct_incoming_value)
1348 emit_move_insn (adjust_address (registers, Pmode, size),
1349 copy_to_reg (struct_incoming_value));
1350 size += GET_MODE_SIZE (Pmode);
1353 /* Return the address of the block. */
1354 return copy_addr_to_reg (XEXP (registers, 0));
1357 /* __builtin_apply_args returns a block of memory allocated on
1358 the stack into which is stored the arg pointer, structure
1359 value address, static chain, and all the registers that might
1360 possibly be used in performing a function call. The code is
1361 moved to the start of the function so the incoming values are
1362 saved. */
1364 static rtx
1365 expand_builtin_apply_args (void)
1367 /* Don't do __builtin_apply_args more than once in a function.
1368 Save the result of the first call and reuse it. */
1369 if (apply_args_value != 0)
1370 return apply_args_value;
1372 /* When this function is called, it means that registers must be
1373 saved on entry to this function. So we migrate the
1374 call to the first insn of this function. */
1375 rtx temp;
1376 rtx seq;
1378 start_sequence ();
1379 temp = expand_builtin_apply_args_1 ();
1380 seq = get_insns ();
1381 end_sequence ();
1383 apply_args_value = temp;
1385 /* Put the insns after the NOTE that starts the function.
1386 If this is inside a start_sequence, make the outer-level insn
1387 chain current, so the code is placed at the start of the
1388 function. */
1389 push_topmost_sequence ();
1390 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1391 pop_topmost_sequence ();
1392 return temp;
1396 /* Perform an untyped call and save the state required to perform an
1397 untyped return of whatever value was returned by the given function. */
1399 static rtx
1400 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1402 int size, align, regno;
1403 enum machine_mode mode;
1404 rtx incoming_args, result, reg, dest, src, call_insn;
1405 rtx old_stack_level = 0;
1406 rtx call_fusage = 0;
1407 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1409 arguments = convert_memory_address (Pmode, arguments);
1411 /* Create a block where the return registers can be saved. */
1412 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1414 /* Fetch the arg pointer from the ARGUMENTS block. */
1415 incoming_args = gen_reg_rtx (Pmode);
1416 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1417 #ifndef STACK_GROWS_DOWNWARD
1418 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1419 incoming_args, 0, OPTAB_LIB_WIDEN);
1420 #endif
1422 /* Push a new argument block and copy the arguments. Do not allow
1423 the (potential) memcpy call below to interfere with our stack
1424 manipulations. */
1425 do_pending_stack_adjust ();
1426 NO_DEFER_POP;
1428 /* Save the stack with nonlocal if available. */
1429 #ifdef HAVE_save_stack_nonlocal
1430 if (HAVE_save_stack_nonlocal)
1431 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1432 else
1433 #endif
1434 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1436 /* Allocate a block of memory onto the stack and copy the memory
1437 arguments to the outgoing arguments address. */
1438 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1439 dest = virtual_outgoing_args_rtx;
1440 #ifndef STACK_GROWS_DOWNWARD
1441 if (GET_CODE (argsize) == CONST_INT)
1442 dest = plus_constant (dest, -INTVAL (argsize));
1443 else
1444 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1445 #endif
1446 dest = gen_rtx_MEM (BLKmode, dest);
1447 set_mem_align (dest, PARM_BOUNDARY);
1448 src = gen_rtx_MEM (BLKmode, incoming_args);
1449 set_mem_align (src, PARM_BOUNDARY);
1450 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1452 /* Refer to the argument block. */
1453 apply_args_size ();
1454 arguments = gen_rtx_MEM (BLKmode, arguments);
1455 set_mem_align (arguments, PARM_BOUNDARY);
1457 /* Walk past the arg-pointer and structure value address. */
1458 size = GET_MODE_SIZE (Pmode);
1459 if (struct_value)
1460 size += GET_MODE_SIZE (Pmode);
1462 /* Restore each of the registers previously saved. Make USE insns
1463 for each of these registers for use in making the call. */
1464 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1465 if ((mode = apply_args_mode[regno]) != VOIDmode)
1467 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1468 if (size % align != 0)
1469 size = CEIL (size, align) * align;
1470 reg = gen_rtx_REG (mode, regno);
1471 emit_move_insn (reg, adjust_address (arguments, mode, size));
1472 use_reg (&call_fusage, reg);
1473 size += GET_MODE_SIZE (mode);
1476 /* Restore the structure value address unless this is passed as an
1477 "invisible" first argument. */
1478 size = GET_MODE_SIZE (Pmode);
1479 if (struct_value)
1481 rtx value = gen_reg_rtx (Pmode);
1482 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1483 emit_move_insn (struct_value, value);
1484 if (REG_P (struct_value))
1485 use_reg (&call_fusage, struct_value);
1486 size += GET_MODE_SIZE (Pmode);
1489 /* All arguments and registers used for the call are set up by now! */
1490 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1492 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1493 and we don't want to load it into a register as an optimization,
1494 because prepare_call_address already did it if it should be done. */
1495 if (GET_CODE (function) != SYMBOL_REF)
1496 function = memory_address (FUNCTION_MODE, function);
1498 /* Generate the actual call instruction and save the return value. */
1499 #ifdef HAVE_untyped_call
1500 if (HAVE_untyped_call)
1501 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1502 result, result_vector (1, result)));
1503 else
1504 #endif
1505 #ifdef HAVE_call_value
1506 if (HAVE_call_value)
1508 rtx valreg = 0;
1510 /* Locate the unique return register. It is not possible to
1511 express a call that sets more than one return register using
1512 call_value; use untyped_call for that. In fact, untyped_call
1513 only needs to save the return registers in the given block. */
1514 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1515 if ((mode = apply_result_mode[regno]) != VOIDmode)
1517 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1519 valreg = gen_rtx_REG (mode, regno);
1522 emit_call_insn (GEN_CALL_VALUE (valreg,
1523 gen_rtx_MEM (FUNCTION_MODE, function),
1524 const0_rtx, NULL_RTX, const0_rtx));
1526 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1528 else
1529 #endif
1530 gcc_unreachable ();
1532 /* Find the CALL insn we just emitted, and attach the register usage
1533 information. */
1534 call_insn = last_call_insn ();
1535 add_function_usage_to (call_insn, call_fusage);
1537 /* Restore the stack. */
1538 #ifdef HAVE_save_stack_nonlocal
1539 if (HAVE_save_stack_nonlocal)
1540 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1541 else
1542 #endif
1543 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1545 OK_DEFER_POP;
1547 /* Return the address of the result block. */
1548 result = copy_addr_to_reg (XEXP (result, 0));
1549 return convert_memory_address (ptr_mode, result);
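/* Illustrative sketch of the untyped-call machinery implemented above, using
   a hypothetical function pointer TARGET_FN and argument-block size of 64
   bytes:

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply ((void (*)()) target_fn, args, 64);
     __builtin_return (res);

   This forwards the current function's incoming arguments to TARGET_FN and
   then returns whatever TARGET_FN returned.  */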
1552 /* Perform an untyped return. */
1554 static void
1555 expand_builtin_return (rtx result)
1557 int size, align, regno;
1558 enum machine_mode mode;
1559 rtx reg;
1560 rtx call_fusage = 0;
1562 result = convert_memory_address (Pmode, result);
1564 apply_result_size ();
1565 result = gen_rtx_MEM (BLKmode, result);
1567 #ifdef HAVE_untyped_return
1568 if (HAVE_untyped_return)
1570 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1571 emit_barrier ();
1572 return;
1574 #endif
1576 /* Restore the return value and note that each value is used. */
1577 size = 0;
1578 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1579 if ((mode = apply_result_mode[regno]) != VOIDmode)
1581 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1582 if (size % align != 0)
1583 size = CEIL (size, align) * align;
1584 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1585 emit_move_insn (reg, adjust_address (result, mode, size));
1587 push_to_sequence (call_fusage);
1588 emit_insn (gen_rtx_USE (VOIDmode, reg));
1589 call_fusage = get_insns ();
1590 end_sequence ();
1591 size += GET_MODE_SIZE (mode);
1594 /* Put the USE insns before the return. */
1595 emit_insn (call_fusage);
1597 /* Return whatever values were restored by jumping directly to the end
1598 of the function. */
1599 expand_naked_return ();
1602 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1604 static enum type_class
1605 type_to_class (tree type)
1607 switch (TREE_CODE (type))
1609 case VOID_TYPE: return void_type_class;
1610 case INTEGER_TYPE: return integer_type_class;
1611 case ENUMERAL_TYPE: return enumeral_type_class;
1612 case BOOLEAN_TYPE: return boolean_type_class;
1613 case POINTER_TYPE: return pointer_type_class;
1614 case REFERENCE_TYPE: return reference_type_class;
1615 case OFFSET_TYPE: return offset_type_class;
1616 case REAL_TYPE: return real_type_class;
1617 case COMPLEX_TYPE: return complex_type_class;
1618 case FUNCTION_TYPE: return function_type_class;
1619 case METHOD_TYPE: return method_type_class;
1620 case RECORD_TYPE: return record_type_class;
1621 case UNION_TYPE:
1622 case QUAL_UNION_TYPE: return union_type_class;
1623 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1624 ? string_type_class : array_type_class);
1625 case LANG_TYPE: return lang_type_class;
1626 default: return no_type_class;
1630 /* Expand a call EXP to __builtin_classify_type. */
1632 static rtx
1633 expand_builtin_classify_type (tree exp)
1635 if (call_expr_nargs (exp))
1636 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1637 return GEN_INT (no_type_class);
1640 /* This helper macro, meant to be used in mathfn_built_in below,
1641 determines which among a set of three builtin math functions is
1642 appropriate for a given type mode. The `F' and `L' cases are
1643 automatically generated from the `double' case. */
1644 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1645 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1646 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1647 fcodel = BUILT_IN_MATHFN##L ; break;
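/* For example, CASE_MATHFN (BUILT_IN_SIN) expands to:
     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;  */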
1649 /* Return the mathematical function equivalent to FN but operating directly
1650 on TYPE, if available. If we can't do the conversion, return zero. */
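/* Example (illustrative only): mathfn_built_in (float_type_node, BUILT_IN_SIN)
   returns implicit_built_in_decls[BUILT_IN_SINF], i.e. the declaration of
   sinf, or NULL_TREE if the runtime is not known to provide it.  */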
1651 tree
1652 mathfn_built_in (tree type, enum built_in_function fn)
1654 enum built_in_function fcode, fcodef, fcodel;
1656 switch (fn)
1658 CASE_MATHFN (BUILT_IN_ACOS)
1659 CASE_MATHFN (BUILT_IN_ACOSH)
1660 CASE_MATHFN (BUILT_IN_ASIN)
1661 CASE_MATHFN (BUILT_IN_ASINH)
1662 CASE_MATHFN (BUILT_IN_ATAN)
1663 CASE_MATHFN (BUILT_IN_ATAN2)
1664 CASE_MATHFN (BUILT_IN_ATANH)
1665 CASE_MATHFN (BUILT_IN_CBRT)
1666 CASE_MATHFN (BUILT_IN_CEIL)
1667 CASE_MATHFN (BUILT_IN_CEXPI)
1668 CASE_MATHFN (BUILT_IN_COPYSIGN)
1669 CASE_MATHFN (BUILT_IN_COS)
1670 CASE_MATHFN (BUILT_IN_COSH)
1671 CASE_MATHFN (BUILT_IN_DREM)
1672 CASE_MATHFN (BUILT_IN_ERF)
1673 CASE_MATHFN (BUILT_IN_ERFC)
1674 CASE_MATHFN (BUILT_IN_EXP)
1675 CASE_MATHFN (BUILT_IN_EXP10)
1676 CASE_MATHFN (BUILT_IN_EXP2)
1677 CASE_MATHFN (BUILT_IN_EXPM1)
1678 CASE_MATHFN (BUILT_IN_FABS)
1679 CASE_MATHFN (BUILT_IN_FDIM)
1680 CASE_MATHFN (BUILT_IN_FLOOR)
1681 CASE_MATHFN (BUILT_IN_FMA)
1682 CASE_MATHFN (BUILT_IN_FMAX)
1683 CASE_MATHFN (BUILT_IN_FMIN)
1684 CASE_MATHFN (BUILT_IN_FMOD)
1685 CASE_MATHFN (BUILT_IN_FREXP)
1686 CASE_MATHFN (BUILT_IN_GAMMA)
1687 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1688 CASE_MATHFN (BUILT_IN_HYPOT)
1689 CASE_MATHFN (BUILT_IN_ILOGB)
1690 CASE_MATHFN (BUILT_IN_INF)
1691 CASE_MATHFN (BUILT_IN_ISINF)
1692 CASE_MATHFN (BUILT_IN_J0)
1693 CASE_MATHFN (BUILT_IN_J1)
1694 CASE_MATHFN (BUILT_IN_JN)
1695 CASE_MATHFN (BUILT_IN_LCEIL)
1696 CASE_MATHFN (BUILT_IN_LDEXP)
1697 CASE_MATHFN (BUILT_IN_LFLOOR)
1698 CASE_MATHFN (BUILT_IN_LGAMMA)
1699 CASE_MATHFN (BUILT_IN_LLCEIL)
1700 CASE_MATHFN (BUILT_IN_LLFLOOR)
1701 CASE_MATHFN (BUILT_IN_LLRINT)
1702 CASE_MATHFN (BUILT_IN_LLROUND)
1703 CASE_MATHFN (BUILT_IN_LOG)
1704 CASE_MATHFN (BUILT_IN_LOG10)
1705 CASE_MATHFN (BUILT_IN_LOG1P)
1706 CASE_MATHFN (BUILT_IN_LOG2)
1707 CASE_MATHFN (BUILT_IN_LOGB)
1708 CASE_MATHFN (BUILT_IN_LRINT)
1709 CASE_MATHFN (BUILT_IN_LROUND)
1710 CASE_MATHFN (BUILT_IN_MODF)
1711 CASE_MATHFN (BUILT_IN_NAN)
1712 CASE_MATHFN (BUILT_IN_NANS)
1713 CASE_MATHFN (BUILT_IN_NEARBYINT)
1714 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1715 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1716 CASE_MATHFN (BUILT_IN_POW)
1717 CASE_MATHFN (BUILT_IN_POWI)
1718 CASE_MATHFN (BUILT_IN_POW10)
1719 CASE_MATHFN (BUILT_IN_REMAINDER)
1720 CASE_MATHFN (BUILT_IN_REMQUO)
1721 CASE_MATHFN (BUILT_IN_RINT)
1722 CASE_MATHFN (BUILT_IN_ROUND)
1723 CASE_MATHFN (BUILT_IN_SCALB)
1724 CASE_MATHFN (BUILT_IN_SCALBLN)
1725 CASE_MATHFN (BUILT_IN_SCALBN)
1726 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1727 CASE_MATHFN (BUILT_IN_SIN)
1728 CASE_MATHFN (BUILT_IN_SINCOS)
1729 CASE_MATHFN (BUILT_IN_SINH)
1730 CASE_MATHFN (BUILT_IN_SQRT)
1731 CASE_MATHFN (BUILT_IN_TAN)
1732 CASE_MATHFN (BUILT_IN_TANH)
1733 CASE_MATHFN (BUILT_IN_TGAMMA)
1734 CASE_MATHFN (BUILT_IN_TRUNC)
1735 CASE_MATHFN (BUILT_IN_Y0)
1736 CASE_MATHFN (BUILT_IN_Y1)
1737 CASE_MATHFN (BUILT_IN_YN)
1739 default:
1740 return NULL_TREE;
1743 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1744 return implicit_built_in_decls[fcode];
1745 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1746 return implicit_built_in_decls[fcodef];
1747 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1748 return implicit_built_in_decls[fcodel];
1749 else
1750 return NULL_TREE;
1753 /* If errno must be maintained, expand the RTL to check if the result,
1754 TARGET, of a built-in function call, EXP, is NaN, and if so set
1755 errno to EDOM. */
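/* Added note: the comparison emitted below relies on the IEEE idiom that
   only a NaN compares unequal to itself.  In plain C the check is roughly

     if (result != result)
       errno = EDOM;

   except that the branch is emitted the other way around: we jump past the
   errno store when the ordered EQ comparison of TARGET with itself holds.  */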
1757 static void
1758 expand_errno_check (tree exp, rtx target)
1760 rtx lab = gen_label_rtx ();
1762 /* Test the result; if it is NaN, set errno=EDOM because
1763 the argument was not in the domain. */
1764 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1765 0, lab);
1767 #ifdef TARGET_EDOM
1768 /* If this built-in doesn't throw an exception, set errno directly. */
1769 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1771 #ifdef GEN_ERRNO_RTX
1772 rtx errno_rtx = GEN_ERRNO_RTX;
1773 #else
1774 rtx errno_rtx
1775 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1776 #endif
1777 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1778 emit_label (lab);
1779 return;
1781 #endif
1783 /* We can't set errno=EDOM directly; let the library call do it.
1784 Pop the arguments right away in case the call gets deleted. */
1785 NO_DEFER_POP;
1786 expand_call (exp, target, 0);
1787 OK_DEFER_POP;
1788 emit_label (lab);
1791 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1792 Return NULL_RTX if a normal call should be emitted rather than expanding
1793 the function in-line. EXP is the expression that is a call to the builtin
1794 function; if convenient, the result should be placed in TARGET.
1795 SUBTARGET may be used as the target for computing one of EXP's operands. */
1797 static rtx
1798 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1800 optab builtin_optab;
1801 rtx op0, insns, before_call;
1802 tree fndecl = get_callee_fndecl (exp);
1803 enum machine_mode mode;
1804 bool errno_set = false;
1805 tree arg, narg;
1807 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1808 return NULL_RTX;
1810 arg = CALL_EXPR_ARG (exp, 0);
1812 switch (DECL_FUNCTION_CODE (fndecl))
1814 CASE_FLT_FN (BUILT_IN_SQRT):
1815 errno_set = ! tree_expr_nonnegative_p (arg);
1816 builtin_optab = sqrt_optab;
1817 break;
1818 CASE_FLT_FN (BUILT_IN_EXP):
1819 errno_set = true; builtin_optab = exp_optab; break;
1820 CASE_FLT_FN (BUILT_IN_EXP10):
1821 CASE_FLT_FN (BUILT_IN_POW10):
1822 errno_set = true; builtin_optab = exp10_optab; break;
1823 CASE_FLT_FN (BUILT_IN_EXP2):
1824 errno_set = true; builtin_optab = exp2_optab; break;
1825 CASE_FLT_FN (BUILT_IN_EXPM1):
1826 errno_set = true; builtin_optab = expm1_optab; break;
1827 CASE_FLT_FN (BUILT_IN_LOGB):
1828 errno_set = true; builtin_optab = logb_optab; break;
1829 CASE_FLT_FN (BUILT_IN_LOG):
1830 errno_set = true; builtin_optab = log_optab; break;
1831 CASE_FLT_FN (BUILT_IN_LOG10):
1832 errno_set = true; builtin_optab = log10_optab; break;
1833 CASE_FLT_FN (BUILT_IN_LOG2):
1834 errno_set = true; builtin_optab = log2_optab; break;
1835 CASE_FLT_FN (BUILT_IN_LOG1P):
1836 errno_set = true; builtin_optab = log1p_optab; break;
1837 CASE_FLT_FN (BUILT_IN_ASIN):
1838 builtin_optab = asin_optab; break;
1839 CASE_FLT_FN (BUILT_IN_ACOS):
1840 builtin_optab = acos_optab; break;
1841 CASE_FLT_FN (BUILT_IN_TAN):
1842 builtin_optab = tan_optab; break;
1843 CASE_FLT_FN (BUILT_IN_ATAN):
1844 builtin_optab = atan_optab; break;
1845 CASE_FLT_FN (BUILT_IN_FLOOR):
1846 builtin_optab = floor_optab; break;
1847 CASE_FLT_FN (BUILT_IN_CEIL):
1848 builtin_optab = ceil_optab; break;
1849 CASE_FLT_FN (BUILT_IN_TRUNC):
1850 builtin_optab = btrunc_optab; break;
1851 CASE_FLT_FN (BUILT_IN_ROUND):
1852 builtin_optab = round_optab; break;
1853 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1854 builtin_optab = nearbyint_optab;
1855 if (flag_trapping_math)
1856 break;
1857 /* Else fall through and expand as rint. */
1858 CASE_FLT_FN (BUILT_IN_RINT):
1859 builtin_optab = rint_optab; break;
1860 default:
1861 gcc_unreachable ();
1864 /* Make a suitable register to place result in. */
1865 mode = TYPE_MODE (TREE_TYPE (exp));
1867 if (! flag_errno_math || ! HONOR_NANS (mode))
1868 errno_set = false;
1870 /* Before working hard, check whether the instruction is available. */
1871 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1873 target = gen_reg_rtx (mode);
1875 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1876 need to expand the argument again. This way, we will not perform
1877 side-effects more than once. */
1878 narg = builtin_save_expr (arg);
1879 if (narg != arg)
1881 arg = narg;
1882 exp = build_call_expr (fndecl, 1, arg);
1885 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
1887 start_sequence ();
1889 /* Compute into TARGET.
1890 Set TARGET to wherever the result comes back. */
1891 target = expand_unop (mode, builtin_optab, op0, target, 0);
1893 if (target != 0)
1895 if (errno_set)
1896 expand_errno_check (exp, target);
1898 /* Output the entire sequence. */
1899 insns = get_insns ();
1900 end_sequence ();
1901 emit_insn (insns);
1902 return target;
1905 /* If we were unable to expand via the builtin, stop the sequence
1906 (without outputting the insns) and call to the library function
1907 with the stabilized argument list. */
1908 end_sequence ();
1911 before_call = get_last_insn ();
1913 target = expand_call (exp, target, target == const0_rtx);
1915 /* If this is a sqrt operation and we don't care about errno, try to
1916 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1917 This allows the semantics of the libcall to be visible to the RTL
1918 optimizers. */
1919 if (builtin_optab == sqrt_optab && !errno_set)
1921 /* Search backwards through the insns emitted by expand_call looking
1922 for the instruction with the REG_RETVAL note. */
1923 rtx last = get_last_insn ();
1924 while (last != before_call)
1926 if (find_reg_note (last, REG_RETVAL, NULL))
1928 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1929 /* Check that the REG_EQUAL note is an EXPR_LIST with
1930 two elements, i.e. symbol_ref(sqrt) and the operand. */
1931 if (note
1932 && GET_CODE (note) == EXPR_LIST
1933 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1934 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1935 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1937 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1938 /* Check operand is a register with expected mode. */
1939 if (operand
1940 && REG_P (operand)
1941 && GET_MODE (operand) == mode)
1943 /* Replace the REG_EQUAL note with a SQRT rtx. */
1944 rtx equiv = gen_rtx_SQRT (mode, operand);
1945 set_unique_reg_note (last, REG_EQUAL, equiv);
1948 break;
1950 last = PREV_INSN (last);
1954 return target;
1957 /* Expand a call to the builtin binary math functions (pow and atan2).
1958 Return NULL_RTX if a normal call should be emitted rather than expanding the
1959 function in-line. EXP is the expression that is a call to the builtin
1960 function; if convenient, the result should be placed in TARGET.
1961 SUBTARGET may be used as the target for computing one of EXP's
1962 operands. */
1964 static rtx
1965 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1967 optab builtin_optab;
1968 rtx op0, op1, insns;
1969 int op1_type = REAL_TYPE;
1970 tree fndecl = get_callee_fndecl (exp);
1971 tree arg0, arg1, narg;
1972 enum machine_mode mode;
1973 bool errno_set = true;
1974 bool stable = true;
1976 switch (DECL_FUNCTION_CODE (fndecl))
1978 CASE_FLT_FN (BUILT_IN_SCALBN):
1979 CASE_FLT_FN (BUILT_IN_SCALBLN):
1980 CASE_FLT_FN (BUILT_IN_LDEXP):
1981 op1_type = INTEGER_TYPE;
1982 default:
1983 break;
1986 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
1987 return NULL_RTX;
1989 arg0 = CALL_EXPR_ARG (exp, 0);
1990 arg1 = CALL_EXPR_ARG (exp, 1);
1992 switch (DECL_FUNCTION_CODE (fndecl))
1994 CASE_FLT_FN (BUILT_IN_POW):
1995 builtin_optab = pow_optab; break;
1996 CASE_FLT_FN (BUILT_IN_ATAN2):
1997 builtin_optab = atan2_optab; break;
1998 CASE_FLT_FN (BUILT_IN_SCALB):
1999 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2000 return 0;
2001 builtin_optab = scalb_optab; break;
2002 CASE_FLT_FN (BUILT_IN_SCALBN):
2003 CASE_FLT_FN (BUILT_IN_SCALBLN):
2004 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2005 return 0;
2006 /* Fall through... */
2007 CASE_FLT_FN (BUILT_IN_LDEXP):
2008 builtin_optab = ldexp_optab; break;
2009 CASE_FLT_FN (BUILT_IN_FMOD):
2010 builtin_optab = fmod_optab; break;
2011 CASE_FLT_FN (BUILT_IN_REMAINDER):
2012 CASE_FLT_FN (BUILT_IN_DREM):
2013 builtin_optab = remainder_optab; break;
2014 default:
2015 gcc_unreachable ();
2018 /* Make a suitable register to place result in. */
2019 mode = TYPE_MODE (TREE_TYPE (exp));
2021 /* Before working hard, check whether the instruction is available. */
2022 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2023 return NULL_RTX;
2025 target = gen_reg_rtx (mode);
2027 if (! flag_errno_math || ! HONOR_NANS (mode))
2028 errno_set = false;
2030 /* Always stabilize the argument list. */
2031 narg = builtin_save_expr (arg1);
2032 if (narg != arg1)
2034 arg1 = narg;
2035 stable = false;
2037 narg = builtin_save_expr (arg0);
2038 if (narg != arg0)
2040 arg0 = narg;
2041 stable = false;
2044 if (! stable)
2045 exp = build_call_expr (fndecl, 2, arg0, arg1);
2047 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2048 op1 = expand_normal (arg1);
2050 start_sequence ();
2052 /* Compute into TARGET.
2053 Set TARGET to wherever the result comes back. */
2054 target = expand_binop (mode, builtin_optab, op0, op1,
2055 target, 0, OPTAB_DIRECT);
2057 /* If we were unable to expand via the builtin, stop the sequence
2058 (without outputting the insns) and call to the library function
2059 with the stabilized argument list. */
2060 if (target == 0)
2062 end_sequence ();
2063 return expand_call (exp, target, target == const0_rtx);
2066 if (errno_set)
2067 expand_errno_check (exp, target);
2069 /* Output the entire sequence. */
2070 insns = get_insns ();
2071 end_sequence ();
2072 emit_insn (insns);
2074 return target;
2077 /* Expand a call to the builtin sin and cos math functions.
2078 Return NULL_RTX if a normal call should be emitted rather than expanding the
2079 function in-line. EXP is the expression that is a call to the builtin
2080 function; if convenient, the result should be placed in TARGET.
2081 SUBTARGET may be used as the target for computing one of EXP's
2082 operands. */
2084 static rtx
2085 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2087 optab builtin_optab;
2088 rtx op0, insns;
2089 tree fndecl = get_callee_fndecl (exp);
2090 enum machine_mode mode;
2091 tree arg, narg;
2093 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2094 return NULL_RTX;
2096 arg = CALL_EXPR_ARG (exp, 0);
2098 switch (DECL_FUNCTION_CODE (fndecl))
2100 CASE_FLT_FN (BUILT_IN_SIN):
2101 CASE_FLT_FN (BUILT_IN_COS):
2102 builtin_optab = sincos_optab; break;
2103 default:
2104 gcc_unreachable ();
2107 /* Make a suitable register to place result in. */
2108 mode = TYPE_MODE (TREE_TYPE (exp));
2110 /* Check if the sincos insn is available, otherwise fall back
2111 to the sin or cos insn. */
2112 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2113 switch (DECL_FUNCTION_CODE (fndecl))
2115 CASE_FLT_FN (BUILT_IN_SIN):
2116 builtin_optab = sin_optab; break;
2117 CASE_FLT_FN (BUILT_IN_COS):
2118 builtin_optab = cos_optab; break;
2119 default:
2120 gcc_unreachable ();
2123 /* Before working hard, check whether the instruction is available. */
2124 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2126 target = gen_reg_rtx (mode);
2128 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2129 need to expand the argument again. This way, we will not perform
2130 side-effects more than once. */
2131 narg = save_expr (arg);
2132 if (narg != arg)
2134 arg = narg;
2135 exp = build_call_expr (fndecl, 1, arg);
2138 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2140 start_sequence ();
2142 /* Compute into TARGET.
2143 Set TARGET to wherever the result comes back. */
2144 if (builtin_optab == sincos_optab)
2146 int result;
2148 switch (DECL_FUNCTION_CODE (fndecl))
2150 CASE_FLT_FN (BUILT_IN_SIN):
2151 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2152 break;
2153 CASE_FLT_FN (BUILT_IN_COS):
2154 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2155 break;
2156 default:
2157 gcc_unreachable ();
2159 gcc_assert (result);
2161 else
2163 target = expand_unop (mode, builtin_optab, op0, target, 0);
2166 if (target != 0)
2168 /* Output the entire sequence. */
2169 insns = get_insns ();
2170 end_sequence ();
2171 emit_insn (insns);
2172 return target;
2175 /* If we were unable to expand via the builtin, stop the sequence
2176 (without outputting the insns) and call to the library function
2177 with the stabilized argument list. */
2178 end_sequence ();
2181 target = expand_call (exp, target, target == const0_rtx);
2183 return target;
2186 /* Expand a call to one of the builtin math functions that operate on
2187 a floating point argument and produce an integer result (ilogb, isinf,
2188 isnan, etc).
2189 Return 0 if a normal call should be emitted rather than expanding the
2190 function in-line. EXP is the expression that is a call to the builtin
2191 function; if convenient, the result should be placed in TARGET.
2192 SUBTARGET may be used as the target for computing one of EXP's operands. */
2194 static rtx
2195 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2197 optab builtin_optab;
2198 enum insn_code icode;
2199 rtx op0;
2200 tree fndecl = get_callee_fndecl (exp);
2201 enum machine_mode mode;
2202 bool errno_set = false;
2203 tree arg, narg;
2205 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2206 return NULL_RTX;
2208 arg = CALL_EXPR_ARG (exp, 0);
2210 switch (DECL_FUNCTION_CODE (fndecl))
2212 CASE_FLT_FN (BUILT_IN_ILOGB):
2213 errno_set = true; builtin_optab = ilogb_optab; break;
2214 CASE_FLT_FN (BUILT_IN_ISINF):
2215 builtin_optab = isinf_optab; break;
2216 default:
2217 gcc_unreachable ();
2220 /* There's no easy way to detect the case we need to set EDOM. */
2221 if (flag_errno_math && errno_set)
2222 return NULL_RTX;
2224 /* Optab mode depends on the mode of the input argument. */
2225 mode = TYPE_MODE (TREE_TYPE (arg));
2227 icode = builtin_optab->handlers[(int) mode].insn_code;
2229 /* Before working hard, check whether the instruction is available. */
2230 if (icode != CODE_FOR_nothing)
2232 /* Make a suitable register to place result in. */
2233 if (!target
2234 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2235 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2237 gcc_assert (insn_data[icode].operand[0].predicate
2238 (target, GET_MODE (target)));
2240 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2241 need to expand the argument again. This way, we will not perform
2242 side-effects more than once. */
2243 narg = builtin_save_expr (arg);
2244 if (narg != arg)
2246 arg = narg;
2247 exp = build_call_expr (fndecl, 1, arg);
2250 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2252 if (mode != GET_MODE (op0))
2253 op0 = convert_to_mode (mode, op0, 0);
2255 /* Compute into TARGET.
2256 Set TARGET to wherever the result comes back. */
2257 emit_unop_insn (icode, target, op0, UNKNOWN);
2258 return target;
2261 target = expand_call (exp, target, target == const0_rtx);
2263 return target;
2266 /* Expand a call to the builtin sincos math function.
2267 Return NULL_RTX if a normal call should be emitted rather than expanding the
2268 function in-line. EXP is the expression that is a call to the builtin
2269 function. */
2271 static rtx
2272 expand_builtin_sincos (tree exp)
2274 rtx op0, op1, op2, target1, target2;
2275 enum machine_mode mode;
2276 tree arg, sinp, cosp;
2277 int result;
2279 if (!validate_arglist (exp, REAL_TYPE,
2280 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2281 return NULL_RTX;
2283 arg = CALL_EXPR_ARG (exp, 0);
2284 sinp = CALL_EXPR_ARG (exp, 1);
2285 cosp = CALL_EXPR_ARG (exp, 2);
2287 /* Make a suitable register to place result in. */
2288 mode = TYPE_MODE (TREE_TYPE (arg));
2290 /* Check if sincos insn is available, otherwise emit the call. */
2291 if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2292 return NULL_RTX;
2294 target1 = gen_reg_rtx (mode);
2295 target2 = gen_reg_rtx (mode);
2297 op0 = expand_normal (arg);
2298 op1 = expand_normal (build_fold_indirect_ref (sinp));
2299 op2 = expand_normal (build_fold_indirect_ref (cosp));
2301 /* Compute into target1 and target2.
2302 Set TARGET to wherever the result comes back. */
2303 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2304 gcc_assert (result);
2306 /* Move target1 and target2 to the memory locations indicated
2307 by op1 and op2. */
2308 emit_move_insn (op1, target1);
2309 emit_move_insn (op2, target2);
2311 return const0_rtx;
2314 /* Expand a call to the internal cexpi builtin to the sincos math function.
2315 EXP is the expression that is a call to the builtin function; if convenient,
2316 the result should be placed in TARGET. SUBTARGET may be used as the target
2317 for computing one of EXP's operands. */
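/* Background (added for clarity): __builtin_cexpi (x) computes exp (i*x),
   i.e. cos (x) + i*sin (x), which is why it can be lowered either to a
   sincos call that fills in both components or to cexp applied to the
   complex value i*x.  */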
2319 static rtx
2320 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2322 tree fndecl = get_callee_fndecl (exp);
2323 tree arg, type;
2324 enum machine_mode mode;
2325 rtx op0, op1, op2;
2327 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2328 return NULL_RTX;
2330 arg = CALL_EXPR_ARG (exp, 0);
2331 type = TREE_TYPE (arg);
2332 mode = TYPE_MODE (TREE_TYPE (arg));
2334 /* Try expanding via a sincos optab, fall back to emitting a libcall
2335 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2336 is only generated from sincos or cexp, or if we have either of them. */
2337 if (sincos_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2339 op1 = gen_reg_rtx (mode);
2340 op2 = gen_reg_rtx (mode);
2342 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2344 /* Compute into op1 and op2. */
2345 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2347 else if (TARGET_HAS_SINCOS)
2349 tree call, fn = NULL_TREE;
2350 tree top1, top2;
2351 rtx op1a, op2a;
2353 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2354 fn = built_in_decls[BUILT_IN_SINCOSF];
2355 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2356 fn = built_in_decls[BUILT_IN_SINCOS];
2357 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2358 fn = built_in_decls[BUILT_IN_SINCOSL];
2359 else
2360 gcc_unreachable ();
2362 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2363 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2364 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2365 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2366 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2367 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2369 /* Make sure not to fold the sincos call again. */
2370 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2371 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2372 call, 3, arg, top1, top2));
2374 else
2376 tree call, fn = NULL_TREE, narg;
2377 tree ctype = build_complex_type (type);
2379 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2380 fn = built_in_decls[BUILT_IN_CEXPF];
2381 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2382 fn = built_in_decls[BUILT_IN_CEXP];
2383 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2384 fn = built_in_decls[BUILT_IN_CEXPL];
2385 else
2386 gcc_unreachable ();
2388 /* If we don't have a decl for cexp, create one. This is the
2389 friendliest fallback if the user calls __builtin_cexpi
2390 without full C99 function support on the target. */
2391 if (fn == NULL_TREE)
2393 tree fntype;
2394 const char *name = NULL;
2396 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2397 name = "cexpf";
2398 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2399 name = "cexp";
2400 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2401 name = "cexpl";
2403 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2404 fn = build_fn_decl (name, fntype);
2407 narg = fold_build2 (COMPLEX_EXPR, ctype,
2408 build_real (type, dconst0), arg);
2410 /* Make sure not to fold the cexp call again. */
2411 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2412 return expand_expr (build_call_nary (ctype, call, 1, narg),
2413 target, VOIDmode, 0);
2416 /* Now build the proper return type. */
2417 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2418 make_tree (TREE_TYPE (arg), op2),
2419 make_tree (TREE_TYPE (arg), op1)),
2420 target, VOIDmode, 0);
2423 /* Expand a call to one of the builtin rounding functions gcc defines
2424 as an extension (lfloor and lceil). As these are gcc extensions we
2425 do not need to worry about setting errno to EDOM.
2426 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2427 EXP is the expression that is a call to the builtin function;
2428 if convenient, the result should be placed in TARGET. SUBTARGET may
2429 be used as the target for computing one of EXP's operands. */
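/* For example (illustrative): if the lfloor optab is not available,
   __builtin_lfloor (x) is expanded as if it were (long) floor (x), i.e. a
   call to the floor fallback followed by expand_fix on its result.  */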
2431 static rtx
2432 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2434 convert_optab builtin_optab;
2435 rtx op0, insns, tmp;
2436 tree fndecl = get_callee_fndecl (exp);
2437 enum built_in_function fallback_fn;
2438 tree fallback_fndecl;
2439 enum machine_mode mode;
2440 tree arg, narg;
2442 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2443 gcc_unreachable ();
2445 arg = CALL_EXPR_ARG (exp, 0);
2447 switch (DECL_FUNCTION_CODE (fndecl))
2449 CASE_FLT_FN (BUILT_IN_LCEIL):
2450 CASE_FLT_FN (BUILT_IN_LLCEIL):
2451 builtin_optab = lceil_optab;
2452 fallback_fn = BUILT_IN_CEIL;
2453 break;
2455 CASE_FLT_FN (BUILT_IN_LFLOOR):
2456 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2457 builtin_optab = lfloor_optab;
2458 fallback_fn = BUILT_IN_FLOOR;
2459 break;
2461 default:
2462 gcc_unreachable ();
2465 /* Make a suitable register to place result in. */
2466 mode = TYPE_MODE (TREE_TYPE (exp));
2468 target = gen_reg_rtx (mode);
2470 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2471 need to expand the argument again. This way, we will not perform
2472 side-effects more than once. */
2473 narg = builtin_save_expr (arg);
2474 if (narg != arg)
2476 arg = narg;
2477 exp = build_call_expr (fndecl, 1, arg);
2480 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2482 start_sequence ();
2484 /* Compute into TARGET. */
2485 if (expand_sfix_optab (target, op0, builtin_optab))
2487 /* Output the entire sequence. */
2488 insns = get_insns ();
2489 end_sequence ();
2490 emit_insn (insns);
2491 return target;
2494 /* If we were unable to expand via the builtin, stop the sequence
2495 (without outputting the insns). */
2496 end_sequence ();
2498 /* Fall back to floating point rounding optab. */
2499 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2501 /* For non-C99 targets we may end up without a fallback fndecl here
2502 if the user called __builtin_lfloor directly. In this case emit
2503 a call to the floor/ceil variants nevertheless. This should result
2504 in the best user experience for targets without full C99 support. */
2505 if (fallback_fndecl == NULL_TREE)
2507 tree fntype;
2508 const char *name = NULL;
2510 switch (DECL_FUNCTION_CODE (fndecl))
2512 case BUILT_IN_LCEIL:
2513 case BUILT_IN_LLCEIL:
2514 name = "ceil";
2515 break;
2516 case BUILT_IN_LCEILF:
2517 case BUILT_IN_LLCEILF:
2518 name = "ceilf";
2519 break;
2520 case BUILT_IN_LCEILL:
2521 case BUILT_IN_LLCEILL:
2522 name = "ceill";
2523 break;
2524 case BUILT_IN_LFLOOR:
2525 case BUILT_IN_LLFLOOR:
2526 name = "floor";
2527 break;
2528 case BUILT_IN_LFLOORF:
2529 case BUILT_IN_LLFLOORF:
2530 name = "floorf";
2531 break;
2532 case BUILT_IN_LFLOORL:
2533 case BUILT_IN_LLFLOORL:
2534 name = "floorl";
2535 break;
2536 default:
2537 gcc_unreachable ();
2540 fntype = build_function_type_list (TREE_TYPE (arg),
2541 TREE_TYPE (arg), NULL_TREE);
2542 fallback_fndecl = build_fn_decl (name, fntype);
2545 exp = build_call_expr (fallback_fndecl, 1, arg);
2547 tmp = expand_normal (exp);
2549 /* Truncate the result of floating point optab to integer
2550 via expand_fix (). */
2551 target = gen_reg_rtx (mode);
2552 expand_fix (target, tmp, 0);
2554 return target;
2557 /* Expand a call to one of the builtin math functions doing integer
2558 conversion (lrint).
2559 Return 0 if a normal call should be emitted rather than expanding the
2560 function in-line. EXP is the expression that is a call to the builtin
2561 function; if convenient, the result should be placed in TARGET.
2562 SUBTARGET may be used as the target for computing one of EXP's operands. */
2564 static rtx
2565 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2567 convert_optab builtin_optab;
2568 rtx op0, insns;
2569 tree fndecl = get_callee_fndecl (exp);
2570 tree arg, narg;
2571 enum machine_mode mode;
2573 /* There's no easy way to detect the case we need to set EDOM. */
2574 if (flag_errno_math)
2575 return NULL_RTX;
2577 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2578 gcc_unreachable ();
2580 arg = CALL_EXPR_ARG (exp, 0);
2582 switch (DECL_FUNCTION_CODE (fndecl))
2584 CASE_FLT_FN (BUILT_IN_LRINT):
2585 CASE_FLT_FN (BUILT_IN_LLRINT):
2586 builtin_optab = lrint_optab; break;
2587 CASE_FLT_FN (BUILT_IN_LROUND):
2588 CASE_FLT_FN (BUILT_IN_LLROUND):
2589 builtin_optab = lround_optab; break;
2590 default:
2591 gcc_unreachable ();
2594 /* Make a suitable register to place result in. */
2595 mode = TYPE_MODE (TREE_TYPE (exp));
2597 target = gen_reg_rtx (mode);
2599 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2600 need to expand the argument again. This way, we will not perform
2601 side-effects more than once. */
2602 narg = builtin_save_expr (arg);
2603 if (narg != arg)
2605 arg = narg;
2606 exp = build_call_expr (fndecl, 1, arg);
2609 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2611 start_sequence ();
2613 if (expand_sfix_optab (target, op0, builtin_optab))
2615 /* Output the entire sequence. */
2616 insns = get_insns ();
2617 end_sequence ();
2618 emit_insn (insns);
2619 return target;
2622 /* If we were unable to expand via the builtin, stop the sequence
2623 (without outputting the insns) and call to the library function
2624 with the stabilized argument list. */
2625 end_sequence ();
2627 target = expand_call (exp, target, target == const0_rtx);
2629 return target;
2632 /* To evaluate powi(x,n), the floating point value x raised to the
2633 constant integer exponent n, we use a hybrid algorithm that
2634 combines the "window method" with look-up tables. For an
2635 introduction to exponentiation algorithms and "addition chains",
2636 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2637 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2638 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2639 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2641 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2642 multiplications to inline before calling the system library's pow
2643 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2644 so this default never requires calling pow, powf or powl. */
2646 #ifndef POWI_MAX_MULTS
2647 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2648 #endif
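/* For example, on a host where HOST_WIDE_INT is 64 bits wide this default
   allows up to 2*64 - 2 = 126 multiplications before a pow call is used.  */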
2650 /* The size of the "optimal power tree" lookup table. All
2651 exponents less than this value are simply looked up in the
2652 powi_table below. This threshold is also used to size the
2653 cache of pseudo registers that hold intermediate results. */
2654 #define POWI_TABLE_SIZE 256
2656 /* The size, in bits, of the window used in the "window method"
2657 exponentiation algorithm. This is equivalent to a radix of
2658 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2659 #define POWI_WINDOW_SIZE 3
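/* A worked example (added for illustration) of the window method with the
   defaults above: for n = 325 = 0b101000101 the low POWI_WINDOW_SIZE bits
   give the digit 5, so

     x**325 = x**320 * x**5 = (x**160)**2 * x**5

   and since 160 and 5 are both below POWI_TABLE_SIZE, those factors are
   evaluated via the optimal-power-tree table below.  */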
2661 /* The following table is an efficient representation of an
2662 "optimal power tree". For each value, i, the corresponding
2663 value, j, in the table states that an optimal evaluation
2664 sequence for calculating pow(x,i) can be found by evaluating
2665 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2666 100 integers is given in Knuth's "Seminumerical algorithms". */
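/* For instance (illustrative): powi_table[5] == 3, so x**5 is evaluated as
   x**3 * x**2, x**3 as x**2 * x, and x**2 as x * x; three multiplications
   in total, which is what powi_cost (5) reports.  */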
2668 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2670 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2671 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2672 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2673 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2674 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2675 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2676 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2677 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2678 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2679 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2680 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2681 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2682 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2683 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2684 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2685 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2686 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2687 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2688 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2689 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2690 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2691 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2692 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2693 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2694 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2695 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2696 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2697 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2698 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2699 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2700 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2701 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2705 /* Return the number of multiplications required to calculate
2706 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2707 subroutine of powi_cost. CACHE is an array indicating
2708 which exponents have already been calculated. */
2710 static int
2711 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2713 /* If we've already calculated this exponent, then this evaluation
2714 doesn't require any additional multiplications. */
2715 if (cache[n])
2716 return 0;
2718 cache[n] = true;
2719 return powi_lookup_cost (n - powi_table[n], cache)
2720 + powi_lookup_cost (powi_table[n], cache) + 1;
2723 /* Return the number of multiplications required to calculate
2724 powi(x,n) for an arbitrary x, given the exponent N. This
2725 function needs to be kept in sync with expand_powi below. */
2727 static int
2728 powi_cost (HOST_WIDE_INT n)
2730 bool cache[POWI_TABLE_SIZE];
2731 unsigned HOST_WIDE_INT digit;
2732 unsigned HOST_WIDE_INT val;
2733 int result;
2735 if (n == 0)
2736 return 0;
2738 /* Ignore the reciprocal when calculating the cost. */
2739 val = (n < 0) ? -n : n;
2741 /* Initialize the exponent cache. */
2742 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2743 cache[1] = true;
2745 result = 0;
2747 while (val >= POWI_TABLE_SIZE)
2749 if (val & 1)
2751 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2752 result += powi_lookup_cost (digit, cache)
2753 + POWI_WINDOW_SIZE + 1;
2754 val >>= POWI_WINDOW_SIZE;
2756 else
2758 val >>= 1;
2759 result++;
2763 return result + powi_lookup_cost (val, cache);
2766 /* Recursive subroutine of expand_powi. This function takes the array,
2767 CACHE, of already calculated exponents and an exponent N and returns
2768 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2770 static rtx
2771 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2773 unsigned HOST_WIDE_INT digit;
2774 rtx target, result;
2775 rtx op0, op1;
2777 if (n < POWI_TABLE_SIZE)
2779 if (cache[n])
2780 return cache[n];
2782 target = gen_reg_rtx (mode);
2783 cache[n] = target;
2785 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2786 op1 = expand_powi_1 (mode, powi_table[n], cache);
2788 else if (n & 1)
2790 target = gen_reg_rtx (mode);
2791 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2792 op0 = expand_powi_1 (mode, n - digit, cache);
2793 op1 = expand_powi_1 (mode, digit, cache);
2795 else
2797 target = gen_reg_rtx (mode);
2798 op0 = expand_powi_1 (mode, n >> 1, cache);
2799 op1 = op0;
2802 result = expand_mult (mode, op0, op1, target, 0);
2803 if (result != target)
2804 emit_move_insn (target, result);
2805 return target;
2808 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2809 floating point operand in mode MODE, and N is the exponent. This
2810 function needs to be kept in sync with powi_cost above. */
2812 static rtx
2813 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2815 unsigned HOST_WIDE_INT val;
2816 rtx cache[POWI_TABLE_SIZE];
2817 rtx result;
2819 if (n == 0)
2820 return CONST1_RTX (mode);
2822 val = (n < 0) ? -n : n;
2824 memset (cache, 0, sizeof (cache));
2825 cache[1] = x;
2827 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2829 /* If the original exponent was negative, reciprocate the result. */
2830 if (n < 0)
2831 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2832 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2834 return result;
2837 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2838 a normal call should be emitted rather than expanding the function
2839 in-line. EXP is the expression that is a call to the builtin
2840 function; if convenient, the result should be placed in TARGET. */
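/* For instance (added illustration): pow (x, 2.0) expands here to the single
   multiplication x * x, since exponents in [-1, 2] are always accepted;
   larger integral exponents such as pow (x, 8.0) take the expand_powi path
   only with -funsafe-math-optimizations, when not optimizing for size, and
   with a small enough powi_cost.  */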
2842 static rtx
2843 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2845 tree arg0, arg1;
2846 tree fn, narg0;
2847 tree type = TREE_TYPE (exp);
2848 REAL_VALUE_TYPE cint, c, c2;
2849 HOST_WIDE_INT n;
2850 rtx op, op2;
2851 enum machine_mode mode = TYPE_MODE (type);
2853 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2854 return NULL_RTX;
2856 arg0 = CALL_EXPR_ARG (exp, 0);
2857 arg1 = CALL_EXPR_ARG (exp, 1);
2859 if (TREE_CODE (arg1) != REAL_CST
2860 || TREE_OVERFLOW (arg1))
2861 return expand_builtin_mathfn_2 (exp, target, subtarget);
2863 /* Handle constant exponents. */
2865 /* For integer valued exponents we can expand to an optimal multiplication
2866 sequence using expand_powi. */
2867 c = TREE_REAL_CST (arg1);
2868 n = real_to_integer (&c);
2869 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2870 if (real_identical (&c, &cint)
2871 && ((n >= -1 && n <= 2)
2872 || (flag_unsafe_math_optimizations
2873 && !optimize_size
2874 && powi_cost (n) <= POWI_MAX_MULTS)))
2876 op = expand_expr (arg0, subtarget, VOIDmode, 0);
2877 if (n != 1)
2879 op = force_reg (mode, op);
2880 op = expand_powi (op, mode, n);
2882 return op;
2885 narg0 = builtin_save_expr (arg0);
2887 /* If the exponent is not integer valued, check if it is half of an integer.
2888 In this case we can expand to sqrt (x) * x**(n/2). */
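/* E.g. (illustrative): for pow (x, 2.5), 2 * 2.5 == 5 is integral, so under
   the conditions checked below the call becomes sqrt (x) * x * x, while
   pow (x, 0.5) is expanded as plain sqrt (x).  */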
2889 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2890 if (fn != NULL_TREE)
2892 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2893 n = real_to_integer (&c2);
2894 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2895 if (real_identical (&c2, &cint)
2896 && ((flag_unsafe_math_optimizations
2897 && !optimize_size
2898 && powi_cost (n/2) <= POWI_MAX_MULTS)
2899 || n == 1))
2901 tree call_expr = build_call_expr (fn, 1, narg0);
2902 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2903 if (n != 1)
2905 op2 = expand_expr (narg0, subtarget, VOIDmode, 0);
2906 op2 = force_reg (mode, op2);
2907 op2 = expand_powi (op2, mode, abs (n / 2));
2908 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2909 0, OPTAB_LIB_WIDEN);
2910 /* If the original exponent was negative, reciprocate the
2911 result. */
2912 if (n < 0)
2913 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2914 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2916 return op;
2920 /* Check whether the exponent is a third of an integer. In this case
2921 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2922 different from pow (x, 1./3.) due to rounding and behavior
2923 with negative x, we need to constrain this transformation to
2924 unsafe math and positive x or finite math. */
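/* E.g. (illustrative, under the constraints above): pow (x, 4./3.) becomes
   cbrt (x) * x, and pow (x, 5./3.) becomes cbrt (x) * cbrt (x) * x.  */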
2925 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2926 if (fn != NULL_TREE
2927 && flag_unsafe_math_optimizations
2928 && (tree_expr_nonnegative_p (arg0)
2929 || !HONOR_NANS (mode)))
2931 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2932 real_round (&c2, mode, &c2);
2933 n = real_to_integer (&c2);
2934 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2935 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2936 real_convert (&c2, mode, &c2);
2937 if (real_identical (&c2, &c)
2938 && ((!optimize_size
2939 && powi_cost (n/3) <= POWI_MAX_MULTS)
2940 || n == 1))
2942 tree call_expr = build_call_expr (fn, 1,narg0);
2943 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2944 if (abs (n) % 3 == 2)
2945 op = expand_simple_binop (mode, MULT, op, op, op,
2946 0, OPTAB_LIB_WIDEN);
2947 if (n != 1)
2949 op2 = expand_expr (narg0, subtarget, VOIDmode, 0);
2950 op2 = force_reg (mode, op2);
2951 op2 = expand_powi (op2, mode, abs (n / 3));
2952 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2953 0, OPTAB_LIB_WIDEN);
2954 /* If the original exponent was negative, reciprocate the
2955 result. */
2956 if (n < 0)
2957 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2958 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2960 return op;
2964 /* Fall back to optab expansion. */
2965 return expand_builtin_mathfn_2 (exp, target, subtarget);
2968 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2969 a normal call should be emitted rather than expanding the function
2970 in-line. EXP is the expression that is a call to the builtin
2971 function; if convenient, the result should be placed in TARGET. */
2973 static rtx
2974 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
2976 tree arg0, arg1;
2977 rtx op0, op1;
2978 enum machine_mode mode;
2979 enum machine_mode mode2;
2981 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2982 return NULL_RTX;
2984 arg0 = CALL_EXPR_ARG (exp, 0);
2985 arg1 = CALL_EXPR_ARG (exp, 1);
2986 mode = TYPE_MODE (TREE_TYPE (exp));
2988 /* Handle constant power. */
2990 if (TREE_CODE (arg1) == INTEGER_CST
2991 && !TREE_OVERFLOW (arg1))
2993 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
2995 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2996 Otherwise, check the number of multiplications required. */
2997 if ((TREE_INT_CST_HIGH (arg1) == 0
2998 || TREE_INT_CST_HIGH (arg1) == -1)
2999 && ((n >= -1 && n <= 2)
3000 || (! optimize_size
3001 && powi_cost (n) <= POWI_MAX_MULTS)))
3003 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
3004 op0 = force_reg (mode, op0);
3005 return expand_powi (op0, mode, n);
3009 /* Emit a libcall to libgcc. */
3011 /* Mode of the 2nd argument must match that of an int. */
3012 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3014 if (target == NULL_RTX)
3015 target = gen_reg_rtx (mode);
3017 op0 = expand_expr (arg0, subtarget, mode, 0);
3018 if (GET_MODE (op0) != mode)
3019 op0 = convert_to_mode (mode, op0, 0);
3020 op1 = expand_expr (arg1, 0, mode2, 0);
3021 if (GET_MODE (op1) != mode2)
3022 op1 = convert_to_mode (mode2, op1, 0);
3024 target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
3025 target, LCT_CONST_MAKE_BLOCK, mode, 2,
3026 op0, mode, op1, mode2);
3028 return target;
3031 /* Expand expression EXP which is a call to the strlen builtin. Return
3032 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3033 try to get the result in TARGET, if convenient. */
3035 static rtx
3036 expand_builtin_strlen (tree exp, rtx target,
3037 enum machine_mode target_mode)
3039 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3040 return NULL_RTX;
3041 else
3043 rtx pat;
3044 tree len;
3045 tree src = CALL_EXPR_ARG (exp, 0);
3046 rtx result, src_reg, char_rtx, before_strlen;
3047 enum machine_mode insn_mode = target_mode, char_mode;
3048 enum insn_code icode = CODE_FOR_nothing;
3049 int align;
3051 /* If the length can be computed at compile-time, return it. */
3052 len = c_strlen (src, 0);
3053 if (len)
3054 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3056 /* If the length can be computed at compile-time and is constant
3057 integer, but there are side-effects in src, evaluate
3058 src for side-effects, then return len.
3059 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3060 can be optimized into: i++; x = 3; */
3061 len = c_strlen (src, 1);
3062 if (len && TREE_CODE (len) == INTEGER_CST)
3064 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3065 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3068 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3070 /* If SRC is not a pointer type, don't do this operation inline. */
3071 if (align == 0)
3072 return NULL_RTX;
3074 /* Bail out if we can't compute strlen in the right mode. */
3075 while (insn_mode != VOIDmode)
3077 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
3078 if (icode != CODE_FOR_nothing)
3079 break;
3081 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3083 if (insn_mode == VOIDmode)
3084 return NULL_RTX;
3086 /* Make a place to write the result of the instruction. */
3087 result = target;
3088 if (! (result != 0
3089 && REG_P (result)
3090 && GET_MODE (result) == insn_mode
3091 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3092 result = gen_reg_rtx (insn_mode);
3094 /* Make a place to hold the source address. We will not expand
3095 the actual source until we are sure that the expansion will
3096 not fail -- there are trees that cannot be expanded twice. */
3097 src_reg = gen_reg_rtx (Pmode);
3099 /* Mark the beginning of the strlen sequence so we can emit the
3100 source operand later. */
3101 before_strlen = get_last_insn ();
3103 char_rtx = const0_rtx;
3104 char_mode = insn_data[(int) icode].operand[2].mode;
3105 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3106 char_mode))
3107 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3109 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3110 char_rtx, GEN_INT (align));
3111 if (! pat)
3112 return NULL_RTX;
3113 emit_insn (pat);
3115 /* Now that we are assured of success, expand the source. */
3116 start_sequence ();
3117 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3118 if (pat != src_reg)
3119 emit_move_insn (src_reg, pat);
3120 pat = get_insns ();
3121 end_sequence ();
3123 if (before_strlen)
3124 emit_insn_after (pat, before_strlen);
3125 else
3126 emit_insn_before (pat, get_insns ());
3128 /* Return the value in the proper mode for this function. */
3129 if (GET_MODE (result) == target_mode)
3130 target = result;
3131 else if (target != 0)
3132 convert_move (target, result, 0);
3133 else
3134 target = convert_to_mode (target_mode, result, 0);
3136 return target;
3140 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed; the
3141 caller should emit a normal call, otherwise try to get the result
3142 in TARGET, if convenient (and in mode MODE if that's convenient). */
3144 static rtx
3145 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3147 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3149 tree type = TREE_TYPE (exp);
3150 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3151 CALL_EXPR_ARG (exp, 1), type);
3152 if (result)
3153 return expand_expr (result, target, mode, EXPAND_NORMAL);
3155 return NULL_RTX;
3158 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3159 caller should emit a normal call, otherwise try to get the result
3160 in TARGET, if convenient (and in mode MODE if that's convenient). */
3162 static rtx
3163 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3165 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3167 tree type = TREE_TYPE (exp);
3168 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3169 CALL_EXPR_ARG (exp, 1), type);
3170 if (result)
3171 return expand_expr (result, target, mode, EXPAND_NORMAL);
3173 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3175 return NULL_RTX;
3178 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3179 caller should emit a normal call, otherwise try to get the result
3180 in TARGET, if convenient (and in mode MODE if that's convenient). */
3182 static rtx
3183 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3185 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3187 tree type = TREE_TYPE (exp);
3188 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3189 CALL_EXPR_ARG (exp, 1), type);
3190 if (result)
3191 return expand_expr (result, target, mode, EXPAND_NORMAL);
3193 return NULL_RTX;
3196 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3197 caller should emit a normal call, otherwise try to get the result
3198 in TARGET, if convenient (and in mode MODE if that's convenient). */
3200 static rtx
3201 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3203 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3205 tree type = TREE_TYPE (exp);
3206 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3207 CALL_EXPR_ARG (exp, 1), type);
3208 if (result)
3209 return expand_expr (result, target, mode, EXPAND_NORMAL);
3211 return NULL_RTX;
3214 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3215 bytes from constant string DATA + OFFSET and return it as target
3216 constant. */
3218 static rtx
3219 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3220 enum machine_mode mode)
3222 const char *str = (const char *) data;
3224 gcc_assert (offset >= 0
3225 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3226 <= strlen (str) + 1));
3228 return c_readstr (str + offset, mode);
3231 /* Expand a call EXP to the memcpy builtin.
3232 Return NULL_RTX if we failed; the caller should emit a normal call,
3233 otherwise try to get the result in TARGET, if convenient (and in
3234 mode MODE if that's convenient). */
3236 static rtx
3237 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3239 tree fndecl = get_callee_fndecl (exp);
3241 if (!validate_arglist (exp,
3242 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3243 return NULL_RTX;
3244 else
3246 tree dest = CALL_EXPR_ARG (exp, 0);
3247 tree src = CALL_EXPR_ARG (exp, 1);
3248 tree len = CALL_EXPR_ARG (exp, 2);
3249 const char *src_str;
3250 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3251 unsigned int dest_align
3252 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3253 rtx dest_mem, src_mem, dest_addr, len_rtx;
3254 tree result = fold_builtin_memory_op (dest, src, len,
3255 TREE_TYPE (TREE_TYPE (fndecl)),
3256 false, /*endp=*/0);
3257 HOST_WIDE_INT expected_size = -1;
3258 unsigned int expected_align = 0;
3260 if (result)
3262 while (TREE_CODE (result) == COMPOUND_EXPR)
3264 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3265 EXPAND_NORMAL);
3266 result = TREE_OPERAND (result, 1);
3268 return expand_expr (result, target, mode, EXPAND_NORMAL);
3271 /* If DEST is not a pointer type, call the normal function. */
3272 if (dest_align == 0)
3273 return NULL_RTX;
3275 /* If SRC is not a pointer type, don't do this
3276 operation in-line. */
3277 if (src_align == 0)
3278 return NULL_RTX;
3280 stringop_block_profile (exp, &expected_align, &expected_size);
3281 if (expected_align < dest_align)
3282 expected_align = dest_align;
3283 dest_mem = get_memory_rtx (dest, len);
3284 set_mem_align (dest_mem, dest_align);
3285 len_rtx = expand_normal (len);
3286 src_str = c_getstr (src);
3288 /* If SRC is a string constant and block move would be done
3289 by pieces, we can avoid loading the string from memory
3290 and only store the computed constants. */
3291 if (src_str
3292 && GET_CODE (len_rtx) == CONST_INT
3293 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3294 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3295 (void *) src_str, dest_align))
3297 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3298 builtin_memcpy_read_str,
3299 (void *) src_str, dest_align, 0);
3300 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3301 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3302 return dest_mem;
3305 src_mem = get_memory_rtx (src, len);
3306 set_mem_align (src_mem, src_align);
3308 /* Copy word part most expediently. */
3309 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3310 CALL_EXPR_TAILCALL (exp)
3311 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3312 expected_align, expected_size);
3314 if (dest_addr == 0)
3316 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3317 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3319 return dest_addr;
3323 /* Expand a call EXP to the mempcpy builtin.
3324 Return NULL_RTX if we failed; the caller should emit a normal call,
3325 otherwise try to get the result in TARGET, if convenient (and in
3326 mode MODE if that's convenient). If ENDP is 0 return the
3327 destination pointer, if ENDP is 1 return the end pointer ala
3328 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3329 stpcpy. */
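/* For example (illustrative): when the 4 bytes of "abc" (including the
   terminating NUL) are copied to DEST, ENDP == 0 yields DEST, ENDP == 1
   yields DEST + 4 (as mempcpy would return), and ENDP == 2 yields DEST + 3,
   the address of the copied NUL (as stpcpy would return).  */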
3331 static rtx
3332 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3334 if (!validate_arglist (exp,
3335 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3336 return NULL_RTX;
3337 else
3339 tree dest = CALL_EXPR_ARG (exp, 0);
3340 tree src = CALL_EXPR_ARG (exp, 1);
3341 tree len = CALL_EXPR_ARG (exp, 2);
3342 return expand_builtin_mempcpy_args (dest, src, len,
3343 TREE_TYPE (exp),
3344 target, mode, /*endp=*/ 1);
3348 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3349 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3350 so that this can also be called without constructing an actual CALL_EXPR.
3351 TYPE is the return type of the call. The other arguments and return value
3352 are the same as for expand_builtin_mempcpy. */
3354 static rtx
3355 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3356 rtx target, enum machine_mode mode, int endp)
3358 /* If return value is ignored, transform mempcpy into memcpy. */
3359 if (target == const0_rtx)
3361 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3363 if (!fn)
3364 return NULL_RTX;
3366 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3367 target, mode, EXPAND_NORMAL);
3369 else
3371 const char *src_str;
3372 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3373 unsigned int dest_align
3374 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3375 rtx dest_mem, src_mem, len_rtx;
3376 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3378 if (result)
3380 while (TREE_CODE (result) == COMPOUND_EXPR)
3382 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3383 EXPAND_NORMAL);
3384 result = TREE_OPERAND (result, 1);
3386 return expand_expr (result, target, mode, EXPAND_NORMAL);
3389 /* If either SRC or DEST is not a pointer type, don't do this
3390 operation in-line. */
3391 if (dest_align == 0 || src_align == 0)
3392 return NULL_RTX;
3394 /* If LEN is not constant, call the normal function. */
3395 if (! host_integerp (len, 1))
3396 return NULL_RTX;
3398 len_rtx = expand_normal (len);
3399 src_str = c_getstr (src);
3401 /* If SRC is a string constant and block move would be done
3402 by pieces, we can avoid loading the string from memory
3403 and only store the computed constants. */
3404 if (src_str
3405 && GET_CODE (len_rtx) == CONST_INT
3406 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3407 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3408 (void *) src_str, dest_align))
3410 dest_mem = get_memory_rtx (dest, len);
3411 set_mem_align (dest_mem, dest_align);
3412 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3413 builtin_memcpy_read_str,
3414 (void *) src_str, dest_align, endp);
3415 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3416 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3417 return dest_mem;
3420 if (GET_CODE (len_rtx) == CONST_INT
3421 && can_move_by_pieces (INTVAL (len_rtx),
3422 MIN (dest_align, src_align)))
3424 dest_mem = get_memory_rtx (dest, len);
3425 set_mem_align (dest_mem, dest_align);
3426 src_mem = get_memory_rtx (src, len);
3427 set_mem_align (src_mem, src_align);
3428 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3429 MIN (dest_align, src_align), endp);
3430 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3431 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3432 return dest_mem;
3435 return NULL_RTX;
3439 /* Expand expression EXP, which is a call to the memmove builtin. Return
3440 NULL_RTX if we failed; the caller should emit a normal call. */
3442 static rtx
3443 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3445 if (!validate_arglist (exp,
3446 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3447 return NULL_RTX;
3448 else
3450 tree dest = CALL_EXPR_ARG (exp, 0);
3451 tree src = CALL_EXPR_ARG (exp, 1);
3452 tree len = CALL_EXPR_ARG (exp, 2);
3453 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3454 target, mode, ignore);
3458 /* Helper function to do the actual work for expand_builtin_memmove. The
3459 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3460 so that this can also be called without constructing an actual CALL_EXPR.
3461 TYPE is the return type of the call. The other arguments and return value
3462 are the same as for expand_builtin_memmove. */
3464 static rtx
3465 expand_builtin_memmove_args (tree dest, tree src, tree len,
3466 tree type, rtx target, enum machine_mode mode,
3467 int ignore)
3469 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3471 if (result)
3473 while (TREE_CODE (result) == COMPOUND_EXPR)
3475 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3476 EXPAND_NORMAL);
3477 result = TREE_OPERAND (result, 1);
3479 return expand_expr (result, target, mode, EXPAND_NORMAL);
3482 /* Otherwise, call the normal function. */
3483 return NULL_RTX;
3486 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3487 NULL_RTX if we failed; the caller should emit a normal call. */
3489 static rtx
3490 expand_builtin_bcopy (tree exp, int ignore)
3492 tree type = TREE_TYPE (exp);
3493 tree src, dest, size;
3495 if (!validate_arglist (exp,
3496 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3497 return NULL_RTX;
3499 src = CALL_EXPR_ARG (exp, 0);
3500 dest = CALL_EXPR_ARG (exp, 1);
3501 size = CALL_EXPR_ARG (exp, 2);
3503 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3504 This is done this way so that if it isn't expanded inline, we fall
3505 back to calling bcopy instead of memmove. */
3506 return expand_builtin_memmove_args (dest, src,
3507 fold_convert (sizetype, size),
3508 type, const0_rtx, VOIDmode,
3509 ignore);
3512 #ifndef HAVE_movstr
3513 # define HAVE_movstr 0
3514 # define CODE_FOR_movstr CODE_FOR_nothing
3515 #endif
3517 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3518 we failed; the caller should emit a normal call. Otherwise try to
3519 get the result in TARGET, if convenient. If ENDP is 0 return the
3520 destination pointer, if ENDP is 1 return the end pointer ala
3521 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3522 stpcpy. */
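/* A rough illustration, assuming the 3-byte string "hi" (two characters
   plus the terminating NUL) is copied into a hypothetical buffer BUF:

       endp == 0  yields  buf        (strcpy-style destination pointer)
       endp == 1  yields  buf + 3    (mempcpy-style, one past the copy)
       endp == 2  yields  buf + 2    (stpcpy-style, address of the NUL)  */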
3524 static rtx
3525 expand_movstr (tree dest, tree src, rtx target, int endp)
3527 rtx end;
3528 rtx dest_mem;
3529 rtx src_mem;
3530 rtx insn;
3531 const struct insn_data * data;
3533 if (!HAVE_movstr)
3534 return NULL_RTX;
3536 dest_mem = get_memory_rtx (dest, NULL);
3537 src_mem = get_memory_rtx (src, NULL);
3538 if (!endp)
3540 target = force_reg (Pmode, XEXP (dest_mem, 0));
3541 dest_mem = replace_equiv_address (dest_mem, target);
3542 end = gen_reg_rtx (Pmode);
3544 else
3546 if (target == 0 || target == const0_rtx)
3548 end = gen_reg_rtx (Pmode);
3549 if (target == 0)
3550 target = end;
3552 else
3553 end = target;
3556 data = insn_data + CODE_FOR_movstr;
3558 if (data->operand[0].mode != VOIDmode)
3559 end = gen_lowpart (data->operand[0].mode, end);
3561 insn = data->genfun (end, dest_mem, src_mem);
3563 gcc_assert (insn);
3565 emit_insn (insn);
3567 /* movstr is supposed to set end to the address of the NUL
3568 terminator. If the caller requested a mempcpy-like return value,
3569 adjust it. */
3570 if (endp == 1 && target != const0_rtx)
3572 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3573 emit_move_insn (target, force_operand (tem, NULL_RTX));
3576 return target;
3579 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3580 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3581 try to get the result in TARGET, if convenient (and in mode MODE if that's
3582 convenient). */
3584 static rtx
3585 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3587 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3589 tree dest = CALL_EXPR_ARG (exp, 0);
3590 tree src = CALL_EXPR_ARG (exp, 1);
3591 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3593 return NULL_RTX;
3596 /* Helper function to do the actual work for expand_builtin_strcpy. The
3597 arguments to the builtin_strcpy call DEST and SRC are broken out
3598 so that this can also be called without constructing an actual CALL_EXPR.
3599 The other arguments and return value are the same as for
3600 expand_builtin_strcpy. */
3602 static rtx
3603 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3604 rtx target, enum machine_mode mode)
3606 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3607 if (result)
3608 return expand_expr (result, target, mode, EXPAND_NORMAL);
3609 return expand_movstr (dest, src, target, /*endp=*/0);
3613 /* Expand a call EXP to the stpcpy builtin.
3614 Return NULL_RTX if we failed; the caller should emit a normal call;
3615 otherwise try to get the result in TARGET, if convenient (and in
3616 mode MODE if that's convenient). */
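/* Sketch of the strategy below, with a hypothetical destination D: when
   the source is a known string constant,

       char *q = stpcpy (d, "abc");

   is handled like mempcpy (d, "abc", 4) with the returned value adjusted
   to d + 3, i.e. a pointer to the copied NUL terminator.  */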
3618 static rtx
3619 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3621 tree dst, src;
3623 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3624 return NULL_RTX;
3626 dst = CALL_EXPR_ARG (exp, 0);
3627 src = CALL_EXPR_ARG (exp, 1);
3629 /* If return value is ignored, transform stpcpy into strcpy. */
3630 if (target == const0_rtx)
3632 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3633 if (!fn)
3634 return NULL_RTX;
3636 return expand_expr (build_call_expr (fn, 2, dst, src),
3637 target, mode, EXPAND_NORMAL);
3639 else
3641 tree len, lenp1;
3642 rtx ret;
3644 /* Ensure we get an actual string whose length can be evaluated at
3645 compile-time, not an expression containing a string. This is
3646 because the latter will potentially produce pessimized code
3647 when used to produce the return value. */
3648 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3649 return expand_movstr (dst, src, target, /*endp=*/2);
3651 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3652 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3653 target, mode, /*endp=*/2);
3655 if (ret)
3656 return ret;
3658 if (TREE_CODE (len) == INTEGER_CST)
3660 rtx len_rtx = expand_normal (len);
3662 if (GET_CODE (len_rtx) == CONST_INT)
3664 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3665 dst, src, target, mode);
3667 if (ret)
3669 if (! target)
3671 if (mode != VOIDmode)
3672 target = gen_reg_rtx (mode);
3673 else
3674 target = gen_reg_rtx (GET_MODE (ret));
3676 if (GET_MODE (target) != GET_MODE (ret))
3677 ret = gen_lowpart (GET_MODE (target), ret);
3679 ret = plus_constant (ret, INTVAL (len_rtx));
3680 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3681 gcc_assert (ret);
3683 return target;
3688 return expand_movstr (dst, src, target, /*endp=*/2);
3692 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3693 bytes from constant string DATA + OFFSET and return it as target
3694 constant. */
3696 static rtx
3697 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3698 enum machine_mode mode)
3700 const char *str = (const char *) data;
3702 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3703 return const0_rtx;
3705 return c_readstr (str + offset, mode);
3708 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3709 NULL_RTX if we failed; the caller should emit a normal call. */
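/* Illustrative example only, with a hypothetical destination D:

       strncpy (d, "ab", 5)

   must store the bytes 'a', 'b', '\0', '\0', '\0'.  When both the length
   and the source are constant, the code below emits those bytes directly
   via store_by_pieces, with builtin_strncpy_read_str supplying zeros past
   the end of the source string.  */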
3711 static rtx
3712 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3714 tree fndecl = get_callee_fndecl (exp);
3716 if (validate_arglist (exp,
3717 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3719 tree dest = CALL_EXPR_ARG (exp, 0);
3720 tree src = CALL_EXPR_ARG (exp, 1);
3721 tree len = CALL_EXPR_ARG (exp, 2);
3722 tree slen = c_strlen (src, 1);
3723 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3725 if (result)
3727 while (TREE_CODE (result) == COMPOUND_EXPR)
3729 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3730 EXPAND_NORMAL);
3731 result = TREE_OPERAND (result, 1);
3733 return expand_expr (result, target, mode, EXPAND_NORMAL);
3736 /* We must be passed a constant len and src parameter. */
3737 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3738 return NULL_RTX;
3740 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3742 /* We're required to pad with trailing zeros if the requested
3743 len is greater than strlen(s2)+1. In that case try to
3744 use store_by_pieces; if it fails, punt. */
3745 if (tree_int_cst_lt (slen, len))
3747 unsigned int dest_align
3748 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3749 const char *p = c_getstr (src);
3750 rtx dest_mem;
3752 if (!p || dest_align == 0 || !host_integerp (len, 1)
3753 || !can_store_by_pieces (tree_low_cst (len, 1),
3754 builtin_strncpy_read_str,
3755 (void *) p, dest_align))
3756 return NULL_RTX;
3758 dest_mem = get_memory_rtx (dest, len);
3759 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3760 builtin_strncpy_read_str,
3761 (void *) p, dest_align, 0);
3762 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3763 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3764 return dest_mem;
3767 return NULL_RTX;
3770 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3771 bytes from constant string DATA + OFFSET and return it as target
3772 constant. */
3774 static rtx
3775 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3776 enum machine_mode mode)
3778 const char *c = (const char *) data;
3779 char *p = alloca (GET_MODE_SIZE (mode));
3781 memset (p, *c, GET_MODE_SIZE (mode));
3783 return c_readstr (p, mode);
3786 /* Callback routine for store_by_pieces. Return the RTL of a register
3787 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3788 char value given in the RTL register data. For example, if mode is
3789 4 bytes wide, return the RTL for 0x01010101*data. */
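/* A minimal stand-alone sketch of the same replication trick for a
   hypothetical 32-bit unsigned word (not code used by the compiler):

       unsigned int spread (unsigned char c)
       {
         return (unsigned int) c * 0x01010101u;
       }

   so that, for example, 0x2a becomes 0x2a2a2a2a.  The callback below
   performs the equivalent multiplication on RTL in MODE, using a
   constant whose bytes are all 1.  */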
3791 static rtx
3792 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3793 enum machine_mode mode)
3795 rtx target, coeff;
3796 size_t size;
3797 char *p;
3799 size = GET_MODE_SIZE (mode);
3800 if (size == 1)
3801 return (rtx) data;
3803 p = alloca (size);
3804 memset (p, 1, size);
3805 coeff = c_readstr (p, mode);
3807 target = convert_to_mode (mode, (rtx) data, 1);
3808 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3809 return force_reg (mode, target);
3812 /* Expand expression EXP, which is a call to the memset builtin. Return
3813 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3814 try to get the result in TARGET, if convenient (and in mode MODE if that's
3815 convenient). */
3817 static rtx
3818 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3820 if (!validate_arglist (exp,
3821 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3822 return NULL_RTX;
3823 else
3825 tree dest = CALL_EXPR_ARG (exp, 0);
3826 tree val = CALL_EXPR_ARG (exp, 1);
3827 tree len = CALL_EXPR_ARG (exp, 2);
3828 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3832 /* Helper function to do the actual work for expand_builtin_memset. The
3833 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3834 so that this can also be called without constructing an actual CALL_EXPR.
3835 The other arguments and return value are the same as for
3836 expand_builtin_memset. */
3838 static rtx
3839 expand_builtin_memset_args (tree dest, tree val, tree len,
3840 rtx target, enum machine_mode mode, tree orig_exp)
3842 tree fndecl, fn;
3843 enum built_in_function fcode;
3844 char c;
3845 unsigned int dest_align;
3846 rtx dest_mem, dest_addr, len_rtx;
3847 HOST_WIDE_INT expected_size = -1;
3848 unsigned int expected_align = 0;
3850 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3852 /* If DEST is not a pointer type, don't do this operation in-line. */
3853 if (dest_align == 0)
3854 return NULL_RTX;
3856 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3857 if (expected_align < dest_align)
3858 expected_align = dest_align;
3860 /* If the LEN parameter is zero, return DEST. */
3861 if (integer_zerop (len))
3863 /* Evaluate and ignore VAL in case it has side-effects. */
3864 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3865 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3868 /* Stabilize the arguments in case we fail. */
3869 dest = builtin_save_expr (dest);
3870 val = builtin_save_expr (val);
3871 len = builtin_save_expr (len);
3873 len_rtx = expand_normal (len);
3874 dest_mem = get_memory_rtx (dest, len);
3876 if (TREE_CODE (val) != INTEGER_CST)
3878 rtx val_rtx;
3880 val_rtx = expand_normal (val);
3881 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3882 val_rtx, 0);
3884 /* Assume that we can memset by pieces if we can store
3885 * the coefficients by pieces (in the required modes).
3886 * We can't pass builtin_memset_gen_str as that emits RTL. */
3887 c = 1;
3888 if (host_integerp (len, 1)
3889 && !(optimize_size && tree_low_cst (len, 1) > 1)
3890 && can_store_by_pieces (tree_low_cst (len, 1),
3891 builtin_memset_read_str, &c, dest_align))
3893 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3894 val_rtx);
3895 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3896 builtin_memset_gen_str, val_rtx, dest_align, 0);
3898 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3899 dest_align, expected_align,
3900 expected_size))
3901 goto do_libcall;
3903 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3904 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3905 return dest_mem;
3908 if (target_char_cast (val, &c))
3909 goto do_libcall;
3911 if (c)
3913 if (host_integerp (len, 1)
3914 && !(optimize_size && tree_low_cst (len, 1) > 1)
3915 && can_store_by_pieces (tree_low_cst (len, 1),
3916 builtin_memset_read_str, &c, dest_align))
3917 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3918 builtin_memset_read_str, &c, dest_align, 0);
3919 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3920 dest_align, expected_align,
3921 expected_size))
3922 goto do_libcall;
3924 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3925 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3926 return dest_mem;
3929 set_mem_align (dest_mem, dest_align);
3930 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3931 CALL_EXPR_TAILCALL (orig_exp)
3932 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3933 expected_align, expected_size);
3935 if (dest_addr == 0)
3937 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3938 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3941 return dest_addr;
3943 do_libcall:
3944 fndecl = get_callee_fndecl (orig_exp);
3945 fcode = DECL_FUNCTION_CODE (fndecl);
3946 if (fcode == BUILT_IN_MEMSET)
3947 fn = build_call_expr (fndecl, 3, dest, val, len);
3948 else if (fcode == BUILT_IN_BZERO)
3949 fn = build_call_expr (fndecl, 2, dest, len);
3950 else
3951 gcc_unreachable ();
3952 if (TREE_CODE (fn) == CALL_EXPR)
3953 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3954 return expand_call (fn, target, target == const0_rtx);
3957 /* Expand expression EXP, which is a call to the bzero builtin. Return
3958 NULL_RTX if we failed; the caller should emit a normal call. */
3960 static rtx
3961 expand_builtin_bzero (tree exp)
3963 tree dest, size;
3965 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3966 return NULL_RTX;
3968 dest = CALL_EXPR_ARG (exp, 0);
3969 size = CALL_EXPR_ARG (exp, 1);
3971 /* New argument list transforming bzero(ptr x, int y) to
3972 memset(ptr x, int 0, size_t y). This is done this way
3973 so that if it isn't expanded inline, we fall back to
3974 calling bzero instead of memset. */
3976 return expand_builtin_memset_args (dest, integer_zero_node,
3977 fold_convert (sizetype, size),
3978 const0_rtx, VOIDmode, exp);
3981 /* Expand expression EXP, which is a call to the memcmp built-in function.
3982 Return NULL_RTX if we failed and the
3983 caller should emit a normal call, otherwise try to get the result in
3984 TARGET, if convenient (and in mode MODE, if that's convenient). */
3986 static rtx
3987 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
3989 if (!validate_arglist (exp,
3990 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3991 return NULL_RTX;
3992 else
3994 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
3995 CALL_EXPR_ARG (exp, 1),
3996 CALL_EXPR_ARG (exp, 2));
3997 if (result)
3998 return expand_expr (result, target, mode, EXPAND_NORMAL);
4001 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4003 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4004 rtx result;
4005 rtx insn;
4006 tree arg1 = CALL_EXPR_ARG (exp, 0);
4007 tree arg2 = CALL_EXPR_ARG (exp, 1);
4008 tree len = CALL_EXPR_ARG (exp, 2);
4010 int arg1_align
4011 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4012 int arg2_align
4013 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4014 enum machine_mode insn_mode;
4016 #ifdef HAVE_cmpmemsi
4017 if (HAVE_cmpmemsi)
4018 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4019 else
4020 #endif
4021 #ifdef HAVE_cmpstrnsi
4022 if (HAVE_cmpstrnsi)
4023 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4024 else
4025 #endif
4026 return NULL_RTX;
4028 /* If we don't have POINTER_TYPE, call the function. */
4029 if (arg1_align == 0 || arg2_align == 0)
4030 return NULL_RTX;
4032 /* Make a place to write the result of the instruction. */
4033 result = target;
4034 if (! (result != 0
4035 && REG_P (result) && GET_MODE (result) == insn_mode
4036 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4037 result = gen_reg_rtx (insn_mode);
4039 arg1_rtx = get_memory_rtx (arg1, len);
4040 arg2_rtx = get_memory_rtx (arg2, len);
4041 arg3_rtx = expand_normal (len);
4043 /* Set MEM_SIZE as appropriate. */
4044 if (GET_CODE (arg3_rtx) == CONST_INT)
4046 set_mem_size (arg1_rtx, arg3_rtx);
4047 set_mem_size (arg2_rtx, arg3_rtx);
4050 #ifdef HAVE_cmpmemsi
4051 if (HAVE_cmpmemsi)
4052 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4053 GEN_INT (MIN (arg1_align, arg2_align)));
4054 else
4055 #endif
4056 #ifdef HAVE_cmpstrnsi
4057 if (HAVE_cmpstrnsi)
4058 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4059 GEN_INT (MIN (arg1_align, arg2_align)));
4060 else
4061 #endif
4062 gcc_unreachable ();
4064 if (insn)
4065 emit_insn (insn);
4066 else
4067 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
4068 TYPE_MODE (integer_type_node), 3,
4069 XEXP (arg1_rtx, 0), Pmode,
4070 XEXP (arg2_rtx, 0), Pmode,
4071 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4072 TYPE_UNSIGNED (sizetype)),
4073 TYPE_MODE (sizetype));
4075 /* Return the value in the proper mode for this function. */
4076 mode = TYPE_MODE (TREE_TYPE (exp));
4077 if (GET_MODE (result) == mode)
4078 return result;
4079 else if (target != 0)
4081 convert_move (target, result, 0);
4082 return target;
4084 else
4085 return convert_to_mode (mode, result, 0);
4087 #endif
4089 return NULL_RTX;
4092 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4093 if we failed; the caller should emit a normal call. Otherwise try to get
4094 the result in TARGET, if convenient. */
4096 static rtx
4097 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4099 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4100 return NULL_RTX;
4101 else
4103 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4104 CALL_EXPR_ARG (exp, 1));
4105 if (result)
4106 return expand_expr (result, target, mode, EXPAND_NORMAL);
4109 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4110 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4111 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4113 rtx arg1_rtx, arg2_rtx;
4114 rtx result, insn = NULL_RTX;
4115 tree fndecl, fn;
4116 tree arg1 = CALL_EXPR_ARG (exp, 0);
4117 tree arg2 = CALL_EXPR_ARG (exp, 1);
4119 int arg1_align
4120 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4121 int arg2_align
4122 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4124 /* If we don't have POINTER_TYPE, call the function. */
4125 if (arg1_align == 0 || arg2_align == 0)
4126 return NULL_RTX;
4128 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4129 arg1 = builtin_save_expr (arg1);
4130 arg2 = builtin_save_expr (arg2);
4132 arg1_rtx = get_memory_rtx (arg1, NULL);
4133 arg2_rtx = get_memory_rtx (arg2, NULL);
4135 #ifdef HAVE_cmpstrsi
4136 /* Try to call cmpstrsi. */
4137 if (HAVE_cmpstrsi)
4139 enum machine_mode insn_mode
4140 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4142 /* Make a place to write the result of the instruction. */
4143 result = target;
4144 if (! (result != 0
4145 && REG_P (result) && GET_MODE (result) == insn_mode
4146 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4147 result = gen_reg_rtx (insn_mode);
4149 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4150 GEN_INT (MIN (arg1_align, arg2_align)));
4152 #endif
4153 #ifdef HAVE_cmpstrnsi
4154 /* Try to determine at least one length and call cmpstrnsi. */
4155 if (!insn && HAVE_cmpstrnsi)
4157 tree len;
4158 rtx arg3_rtx;
4160 enum machine_mode insn_mode
4161 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4162 tree len1 = c_strlen (arg1, 1);
4163 tree len2 = c_strlen (arg2, 1);
4165 if (len1)
4166 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4167 if (len2)
4168 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4170 /* If we don't have a constant length for the first, use the length
4171 of the second, if we know it. We don't require a constant for
4172 this case; some cost analysis could be done if both are available
4173 but neither is constant. For now, assume they're equally cheap,
4174 unless one has side effects. If both strings have constant lengths,
4175 use the smaller. */
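/* Hypothetical worked example of the selection above: for
   strcmp (s, "hello") with S of unknown length, len1 is unknown and
   len2 is the constant 6 (strlen plus the NUL), so LEN becomes 6; for
   strcmp ("hi", "hello") both lengths are constant (3 and 6) and the
   smaller one, 3, is used.  */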
4177 if (!len1)
4178 len = len2;
4179 else if (!len2)
4180 len = len1;
4181 else if (TREE_SIDE_EFFECTS (len1))
4182 len = len2;
4183 else if (TREE_SIDE_EFFECTS (len2))
4184 len = len1;
4185 else if (TREE_CODE (len1) != INTEGER_CST)
4186 len = len2;
4187 else if (TREE_CODE (len2) != INTEGER_CST)
4188 len = len1;
4189 else if (tree_int_cst_lt (len1, len2))
4190 len = len1;
4191 else
4192 len = len2;
4194 /* If both arguments have side effects, we cannot optimize. */
4195 if (!len || TREE_SIDE_EFFECTS (len))
4196 goto do_libcall;
4198 arg3_rtx = expand_normal (len);
4200 /* Make a place to write the result of the instruction. */
4201 result = target;
4202 if (! (result != 0
4203 && REG_P (result) && GET_MODE (result) == insn_mode
4204 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4205 result = gen_reg_rtx (insn_mode);
4207 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4208 GEN_INT (MIN (arg1_align, arg2_align)));
4210 #endif
4212 if (insn)
4214 emit_insn (insn);
4216 /* Return the value in the proper mode for this function. */
4217 mode = TYPE_MODE (TREE_TYPE (exp));
4218 if (GET_MODE (result) == mode)
4219 return result;
4220 if (target == 0)
4221 return convert_to_mode (mode, result, 0);
4222 convert_move (target, result, 0);
4223 return target;
4226 /* Expand the library call ourselves using a stabilized argument
4227 list to avoid re-evaluating the function's arguments twice. */
4228 #ifdef HAVE_cmpstrnsi
4229 do_libcall:
4230 #endif
4231 fndecl = get_callee_fndecl (exp);
4232 fn = build_call_expr (fndecl, 2, arg1, arg2);
4233 if (TREE_CODE (fn) == CALL_EXPR)
4234 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4235 return expand_call (fn, target, target == const0_rtx);
4237 #endif
4238 return NULL_RTX;
4241 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4242 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
4243 the result in TARGET, if convenient. */
4245 static rtx
4246 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4248 if (!validate_arglist (exp,
4249 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4250 return NULL_RTX;
4251 else
4253 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4254 CALL_EXPR_ARG (exp, 1),
4255 CALL_EXPR_ARG (exp, 2));
4256 if (result)
4257 return expand_expr (result, target, mode, EXPAND_NORMAL);
4260 /* If c_strlen can determine an expression for one of the string
4261 lengths, and it doesn't have side effects, then emit cmpstrnsi
4262 using length MIN(strlen(string)+1, arg3). */
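/* For instance (purely illustrative), strncmp (s, "abc", 10) with S of
   unknown length would compare using MIN (strlen ("abc") + 1, 10) == 4,
   while strncmp (s, "abc", 2) would compare using length 2.  */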
4263 #ifdef HAVE_cmpstrnsi
4264 if (HAVE_cmpstrnsi)
4266 tree len, len1, len2;
4267 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4268 rtx result, insn;
4269 tree fndecl, fn;
4270 tree arg1 = CALL_EXPR_ARG (exp, 0);
4271 tree arg2 = CALL_EXPR_ARG (exp, 1);
4272 tree arg3 = CALL_EXPR_ARG (exp, 2);
4274 int arg1_align
4275 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4276 int arg2_align
4277 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4278 enum machine_mode insn_mode
4279 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4281 len1 = c_strlen (arg1, 1);
4282 len2 = c_strlen (arg2, 1);
4284 if (len1)
4285 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4286 if (len2)
4287 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4289 /* If we don't have a constant length for the first, use the length
4290 of the second, if we know it. We don't require a constant for
4291 this case; some cost analysis could be done if both are available
4292 but neither is constant. For now, assume they're equally cheap,
4293 unless one has side effects. If both strings have constant lengths,
4294 use the smaller. */
4296 if (!len1)
4297 len = len2;
4298 else if (!len2)
4299 len = len1;
4300 else if (TREE_SIDE_EFFECTS (len1))
4301 len = len2;
4302 else if (TREE_SIDE_EFFECTS (len2))
4303 len = len1;
4304 else if (TREE_CODE (len1) != INTEGER_CST)
4305 len = len2;
4306 else if (TREE_CODE (len2) != INTEGER_CST)
4307 len = len1;
4308 else if (tree_int_cst_lt (len1, len2))
4309 len = len1;
4310 else
4311 len = len2;
4313 /* If both arguments have side effects, we cannot optimize. */
4314 if (!len || TREE_SIDE_EFFECTS (len))
4315 return NULL_RTX;
4317 /* The actual new length parameter is MIN(len,arg3). */
4318 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4319 fold_convert (TREE_TYPE (len), arg3));
4321 /* If we don't have POINTER_TYPE, call the function. */
4322 if (arg1_align == 0 || arg2_align == 0)
4323 return NULL_RTX;
4325 /* Make a place to write the result of the instruction. */
4326 result = target;
4327 if (! (result != 0
4328 && REG_P (result) && GET_MODE (result) == insn_mode
4329 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4330 result = gen_reg_rtx (insn_mode);
4332 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4333 arg1 = builtin_save_expr (arg1);
4334 arg2 = builtin_save_expr (arg2);
4335 len = builtin_save_expr (len);
4337 arg1_rtx = get_memory_rtx (arg1, len);
4338 arg2_rtx = get_memory_rtx (arg2, len);
4339 arg3_rtx = expand_normal (len);
4340 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4341 GEN_INT (MIN (arg1_align, arg2_align)));
4342 if (insn)
4344 emit_insn (insn);
4346 /* Return the value in the proper mode for this function. */
4347 mode = TYPE_MODE (TREE_TYPE (exp));
4348 if (GET_MODE (result) == mode)
4349 return result;
4350 if (target == 0)
4351 return convert_to_mode (mode, result, 0);
4352 convert_move (target, result, 0);
4353 return target;
4356 /* Expand the library call ourselves using a stabilized argument
4357 list to avoid re-evaluating the function's arguments twice. */
4358 fndecl = get_callee_fndecl (exp);
4359 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4360 if (TREE_CODE (fn) == CALL_EXPR)
4361 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4362 return expand_call (fn, target, target == const0_rtx);
4364 #endif
4365 return NULL_RTX;
4368 /* Expand expression EXP, which is a call to the strcat builtin.
4369 Return NULL_RTX if we failed; the caller should emit a normal call;
4370 otherwise try to get the result in TARGET, if convenient. */
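/* Sketch of the transformation attempted below, with hypothetical
   arguments D and S: when not optimizing for size,

       strcat (d, s);

   is expanded as if it were

       strcpy (d + strlen (d), s);

   with D evaluated only once and the original D returned as the value.  */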
4372 static rtx
4373 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4375 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4376 return NULL_RTX;
4377 else
4379 tree dst = CALL_EXPR_ARG (exp, 0);
4380 tree src = CALL_EXPR_ARG (exp, 1);
4381 const char *p = c_getstr (src);
4383 /* If the string length is zero, return the dst parameter. */
4384 if (p && *p == '\0')
4385 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4387 if (!optimize_size)
4389 /* See if we can store by pieces into (dst + strlen(dst)). */
4390 tree newsrc, newdst,
4391 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4392 rtx insns;
4394 /* Stabilize the argument list. */
4395 newsrc = builtin_save_expr (src);
4396 dst = builtin_save_expr (dst);
4398 start_sequence ();
4400 /* Create strlen (dst). */
4401 newdst = build_call_expr (strlen_fn, 1, dst);
4402 /* Create (dst + (cast) strlen (dst)). */
4403 newdst = fold_convert (TREE_TYPE (dst), newdst);
4404 newdst = fold_build2 (PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4406 newdst = builtin_save_expr (newdst);
4408 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4410 end_sequence (); /* Stop sequence. */
4411 return NULL_RTX;
4414 /* Output the entire sequence. */
4415 insns = get_insns ();
4416 end_sequence ();
4417 emit_insn (insns);
4419 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4422 return NULL_RTX;
4426 /* Expand expression EXP, which is a call to the strncat builtin.
4427 Return NULL_RTX if we failed; the caller should emit a normal call;
4428 otherwise try to get the result in TARGET, if convenient. */
4430 static rtx
4431 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4433 if (validate_arglist (exp,
4434 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4436 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4437 CALL_EXPR_ARG (exp, 1),
4438 CALL_EXPR_ARG (exp, 2));
4439 if (result)
4440 return expand_expr (result, target, mode, EXPAND_NORMAL);
4442 return NULL_RTX;
4445 /* Expand expression EXP, which is a call to the strspn builtin.
4446 Return NULL_RTX if we failed; the caller should emit a normal call;
4447 otherwise try to get the result in TARGET, if convenient. */
4449 static rtx
4450 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4452 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4454 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4455 CALL_EXPR_ARG (exp, 1));
4456 if (result)
4457 return expand_expr (result, target, mode, EXPAND_NORMAL);
4459 return NULL_RTX;
4462 /* Expand expression EXP, which is a call to the strcspn builtin.
4463 Return NULL_RTX if we failed; the caller should emit a normal call;
4464 otherwise try to get the result in TARGET, if convenient. */
4466 static rtx
4467 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4469 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4471 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4472 CALL_EXPR_ARG (exp, 1));
4473 if (result)
4474 return expand_expr (result, target, mode, EXPAND_NORMAL);
4476 return NULL_RTX;
4479 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4480 if that's convenient. */
4482 rtx
4483 expand_builtin_saveregs (void)
4485 rtx val, seq;
4487 /* Don't do __builtin_saveregs more than once in a function.
4488 Save the result of the first call and reuse it. */
4489 if (saveregs_value != 0)
4490 return saveregs_value;
4492 /* When this function is called, it means that registers must be
4493 saved on entry to this function. So we migrate the call to the
4494 first insn of this function. */
4496 start_sequence ();
4498 /* Do whatever the machine needs done in this case. */
4499 val = targetm.calls.expand_builtin_saveregs ();
4501 seq = get_insns ();
4502 end_sequence ();
4504 saveregs_value = val;
4506 /* Put the insns after the NOTE that starts the function. If this
4507 is inside a start_sequence, make the outer-level insn chain current, so
4508 the code is placed at the start of the function. */
4509 push_topmost_sequence ();
4510 emit_insn_after (seq, entry_of_function ());
4511 pop_topmost_sequence ();
4513 return val;
4516 /* __builtin_args_info (N) returns word N of the arg space info
4517 for the current function. The number and meanings of words
4518 are controlled by the definition of CUMULATIVE_ARGS. */
4520 static rtx
4521 expand_builtin_args_info (tree exp)
4523 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4524 int *word_ptr = (int *) &current_function_args_info;
4526 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4528 if (call_expr_nargs (exp) != 0)
4530 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4531 error ("argument of %<__builtin_args_info%> must be constant");
4532 else
4534 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4536 if (wordnum < 0 || wordnum >= nwords)
4537 error ("argument of %<__builtin_args_info%> out of range");
4538 else
4539 return GEN_INT (word_ptr[wordnum]);
4542 else
4543 error ("missing argument in %<__builtin_args_info%>");
4545 return const0_rtx;
4548 /* Expand a call to __builtin_next_arg. */
4550 static rtx
4551 expand_builtin_next_arg (void)
4553 /* Checking arguments is already done in fold_builtin_next_arg
4554 that must be called before this function. */
4555 return expand_binop (Pmode, add_optab,
4556 current_function_internal_arg_pointer,
4557 current_function_arg_offset_rtx,
4558 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4561 /* Make it easier for the backends by protecting the valist argument
4562 from multiple evaluations. */
4564 static tree
4565 stabilize_va_list (tree valist, int needs_lvalue)
4567 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4569 if (TREE_SIDE_EFFECTS (valist))
4570 valist = save_expr (valist);
4572 /* For this case, the backends will be expecting a pointer to
4573 TREE_TYPE (va_list_type_node), but it's possible we've
4574 actually been given an array (an actual va_list_type_node).
4575 So fix it. */
4576 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4578 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4579 valist = build_fold_addr_expr_with_type (valist, p1);
4582 else
4584 tree pt;
4586 if (! needs_lvalue)
4588 if (! TREE_SIDE_EFFECTS (valist))
4589 return valist;
4591 pt = build_pointer_type (va_list_type_node);
4592 valist = fold_build1 (ADDR_EXPR, pt, valist);
4593 TREE_SIDE_EFFECTS (valist) = 1;
4596 if (TREE_SIDE_EFFECTS (valist))
4597 valist = save_expr (valist);
4598 valist = build_fold_indirect_ref (valist);
4601 return valist;
4604 /* The "standard" definition of va_list is void*. */
4606 tree
4607 std_build_builtin_va_list (void)
4609 return ptr_type_node;
4612 /* The "standard" implementation of va_start: just assign `nextarg' to
4613 the variable. */
4615 void
4616 std_expand_builtin_va_start (tree valist, rtx nextarg)
4618 tree t;
4620 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
4621 make_tree (ptr_type_node, nextarg));
4622 TREE_SIDE_EFFECTS (t) = 1;
4624 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4627 /* Expand EXP, a call to __builtin_va_start. */
4629 static rtx
4630 expand_builtin_va_start (tree exp)
4632 rtx nextarg;
4633 tree valist;
4635 if (call_expr_nargs (exp) < 2)
4637 error ("too few arguments to function %<va_start%>");
4638 return const0_rtx;
4641 if (fold_builtin_next_arg (exp, true))
4642 return const0_rtx;
4644 nextarg = expand_builtin_next_arg ();
4645 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4647 #ifdef EXPAND_BUILTIN_VA_START
4648 EXPAND_BUILTIN_VA_START (valist, nextarg);
4649 #else
4650 std_expand_builtin_va_start (valist, nextarg);
4651 #endif
4653 return const0_rtx;
4656 /* The "standard" implementation of va_arg: read the value from the
4657 current (padded) address and increment by the (padded) size. */
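/* Illustrative sketch only, assuming a 4-byte PARM_BOUNDARY and an
   argument type T that requires 8-byte alignment; the gimplified
   sequence built below behaves roughly like:

       ap = (ap + 7) & ~7;                     dynamic realignment
       result = *(T *) ap;                     read at the padded address
       ap = ap + ROUND_UP (sizeof (T), 4);     advance past the slot

   On PAD_VARARGS_DOWN targets, small arguments are additionally offset
   so that the value sits at the top of its slot.  */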
4659 tree
4660 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4662 tree addr, t, type_size, rounded_size, valist_tmp;
4663 unsigned HOST_WIDE_INT align, boundary;
4664 bool indirect;
4666 #ifdef ARGS_GROW_DOWNWARD
4667 /* All of the alignment and movement below is for args-grow-up machines.
4668 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4669 implement their own specialized gimplify_va_arg_expr routines. */
4670 gcc_unreachable ();
4671 #endif
4673 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4674 if (indirect)
4675 type = build_pointer_type (type);
4677 align = PARM_BOUNDARY / BITS_PER_UNIT;
4678 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4680 /* Hoist the valist value into a temporary for the moment. */
4681 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4683 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4684 requires greater alignment, we must perform dynamic alignment. */
4685 if (boundary > align
4686 && !integer_zerop (TYPE_SIZE (type)))
4688 t = fold_convert (TREE_TYPE (valist), size_int (boundary - 1));
4689 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4690 build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t));
4691 gimplify_and_add (t, pre_p);
4693 t = fold_convert (TREE_TYPE (valist), size_int (-boundary));
4694 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4695 build2 (BIT_AND_EXPR, TREE_TYPE (valist), valist_tmp, t));
4696 gimplify_and_add (t, pre_p);
4698 else
4699 boundary = align;
4701 /* If the actual alignment is less than the alignment of the type,
4702 adjust the type accordingly so that we don't assume strict alignment
4703 when dereferencing the pointer. */
4704 boundary *= BITS_PER_UNIT;
4705 if (boundary < TYPE_ALIGN (type))
4707 type = build_variant_type_copy (type);
4708 TYPE_ALIGN (type) = boundary;
4711 /* Compute the rounded size of the type. */
4712 type_size = size_in_bytes (type);
4713 rounded_size = round_up (type_size, align);
4715 /* Reduce rounded_size so it's sharable with the postqueue. */
4716 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4718 /* Get AP. */
4719 addr = valist_tmp;
4720 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4722 /* Small args are padded downward. */
4723 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4724 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4725 size_binop (MINUS_EXPR, rounded_size, type_size));
4726 t = fold_convert (TREE_TYPE (addr), t);
4727 addr = fold_build2 (PLUS_EXPR, TREE_TYPE (addr), addr, t);
4730 /* Compute new value for AP. */
4731 t = fold_convert (TREE_TYPE (valist), rounded_size);
4732 t = build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t);
4733 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4734 gimplify_and_add (t, pre_p);
4736 addr = fold_convert (build_pointer_type (type), addr);
4738 if (indirect)
4739 addr = build_va_arg_indirect_ref (addr);
4741 return build_va_arg_indirect_ref (addr);
4744 /* Build an indirect-ref expression over the given TREE, which represents a
4745 piece of a va_arg() expansion. */
4746 tree
4747 build_va_arg_indirect_ref (tree addr)
4749 addr = build_fold_indirect_ref (addr);
4751 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4752 mf_mark (addr);
4754 return addr;
4757 /* Return a dummy expression of type TYPE in order to keep going after an
4758 error. */
4760 static tree
4761 dummy_object (tree type)
4763 tree t = build_int_cst (build_pointer_type (type), 0);
4764 return build1 (INDIRECT_REF, type, t);
4767 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4768 builtin function, but a very special sort of operator. */
4770 enum gimplify_status
4771 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4773 tree promoted_type, want_va_type, have_va_type;
4774 tree valist = TREE_OPERAND (*expr_p, 0);
4775 tree type = TREE_TYPE (*expr_p);
4776 tree t;
4778 /* Verify that valist is of the proper type. */
4779 want_va_type = va_list_type_node;
4780 have_va_type = TREE_TYPE (valist);
4782 if (have_va_type == error_mark_node)
4783 return GS_ERROR;
4785 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4787 /* If va_list is an array type, the argument may have decayed
4788 to a pointer type, e.g. by being passed to another function.
4789 In that case, unwrap both types so that we can compare the
4790 underlying records. */
4791 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4792 || POINTER_TYPE_P (have_va_type))
4794 want_va_type = TREE_TYPE (want_va_type);
4795 have_va_type = TREE_TYPE (have_va_type);
4799 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4801 error ("first argument to %<va_arg%> not of type %<va_list%>");
4802 return GS_ERROR;
4805 /* Generate a diagnostic for requesting data of a type that cannot
4806 be passed through `...' due to type promotion at the call site. */
4807 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4808 != type)
4810 static bool gave_help;
4812 /* Unfortunately, this is merely undefined, rather than a constraint
4813 violation, so we cannot make this an error. If this call is never
4814 executed, the program is still strictly conforming. */
4815 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4816 type, promoted_type);
4817 if (! gave_help)
4819 gave_help = true;
4820 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4821 promoted_type, type);
4824 /* We can, however, treat "undefined" any way we please.
4825 Call abort to encourage the user to fix the program. */
4826 inform ("if this code is reached, the program will abort");
4827 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4828 append_to_statement_list (t, pre_p);
4830 /* This is dead code, but go ahead and finish so that the
4831 mode of the result comes out right. */
4832 *expr_p = dummy_object (type);
4833 return GS_ALL_DONE;
4835 else
4837 /* Make it easier for the backends by protecting the valist argument
4838 from multiple evaluations. */
4839 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4841 /* For this case, the backends will be expecting a pointer to
4842 TREE_TYPE (va_list_type_node), but it's possible we've
4843 actually been given an array (an actual va_list_type_node).
4844 So fix it. */
4845 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4847 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4848 valist = build_fold_addr_expr_with_type (valist, p1);
4850 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4852 else
4853 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4855 if (!targetm.gimplify_va_arg_expr)
4856 /* FIXME: Once most targets are converted, we should merely
4857 assert this is non-null. */
4858 return GS_ALL_DONE;
4860 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4861 return GS_OK;
4865 /* Expand EXP, a call to __builtin_va_end. */
4867 static rtx
4868 expand_builtin_va_end (tree exp)
4870 tree valist = CALL_EXPR_ARG (exp, 0);
4872 /* Evaluate for side effects, if needed. I hate macros that don't
4873 do that. */
4874 if (TREE_SIDE_EFFECTS (valist))
4875 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4877 return const0_rtx;
4880 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4881 builtin rather than just as an assignment in stdarg.h because of the
4882 nastiness of array-type va_list types. */
4884 static rtx
4885 expand_builtin_va_copy (tree exp)
4887 tree dst, src, t;
4889 dst = CALL_EXPR_ARG (exp, 0);
4890 src = CALL_EXPR_ARG (exp, 1);
4892 dst = stabilize_va_list (dst, 1);
4893 src = stabilize_va_list (src, 0);
4895 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4897 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4898 TREE_SIDE_EFFECTS (t) = 1;
4899 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4901 else
4903 rtx dstb, srcb, size;
4905 /* Evaluate to pointers. */
4906 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4907 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4908 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4909 VOIDmode, EXPAND_NORMAL);
4911 dstb = convert_memory_address (Pmode, dstb);
4912 srcb = convert_memory_address (Pmode, srcb);
4914 /* "Dereference" to BLKmode memories. */
4915 dstb = gen_rtx_MEM (BLKmode, dstb);
4916 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4917 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4918 srcb = gen_rtx_MEM (BLKmode, srcb);
4919 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4920 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4922 /* Copy. */
4923 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4926 return const0_rtx;
4929 /* Expand a call to one of the builtin functions __builtin_frame_address or
4930 __builtin_return_address. */
4932 static rtx
4933 expand_builtin_frame_address (tree fndecl, tree exp)
4935 /* The argument must be a nonnegative integer constant.
4936 It counts the number of frames to scan up the stack.
4937 The value is the return address saved in that frame. */
4938 if (call_expr_nargs (exp) == 0)
4939 /* Warning about missing arg was already issued. */
4940 return const0_rtx;
4941 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4943 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4944 error ("invalid argument to %<__builtin_frame_address%>");
4945 else
4946 error ("invalid argument to %<__builtin_return_address%>");
4947 return const0_rtx;
4949 else
4951 rtx tem
4952 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4953 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4955 /* Some ports cannot access arbitrary stack frames. */
4956 if (tem == NULL)
4958 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4959 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4960 else
4961 warning (0, "unsupported argument to %<__builtin_return_address%>");
4962 return const0_rtx;
4965 /* For __builtin_frame_address, return what we've got. */
4966 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4967 return tem;
4969 if (!REG_P (tem)
4970 && ! CONSTANT_P (tem))
4971 tem = copy_to_mode_reg (Pmode, tem);
4972 return tem;
4976 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4977 we failed and the caller should emit a normal call, otherwise try to get
4978 the result in TARGET, if convenient. */
4980 static rtx
4981 expand_builtin_alloca (tree exp, rtx target)
4983 rtx op0;
4984 rtx result;
4986 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
4987 should always expand to function calls. These can be intercepted
4988 in libmudflap. */
4989 if (flag_mudflap)
4990 return NULL_RTX;
4992 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4993 return NULL_RTX;
4995 /* Compute the argument. */
4996 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4998 /* Allocate the desired space. */
4999 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5000 result = convert_memory_address (ptr_mode, result);
5002 return result;
5005 /* Expand a call to a bswap builtin with argument ARG0. MODE
5006 is the mode to expand with. */
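/* For example, expanding __builtin_bswap32 (0x12345678) through
   bswap_optab yields 0x78563412; the 64-bit variant reverses all eight
   bytes.  */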
5008 static rtx
5009 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5011 enum machine_mode mode;
5012 tree arg;
5013 rtx op0;
5015 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5016 return NULL_RTX;
5018 arg = CALL_EXPR_ARG (exp, 0);
5019 mode = TYPE_MODE (TREE_TYPE (arg));
5020 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
5022 target = expand_unop (mode, bswap_optab, op0, target, 1);
5024 gcc_assert (target);
5026 return convert_to_mode (mode, target, 0);
5029 /* Expand a call to a unary builtin in EXP.
5030 Return NULL_RTX if a normal call should be emitted rather than expanding the
5031 function in-line. If convenient, the result should be placed in TARGET.
5032 SUBTARGET may be used as the target for computing one of EXP's operands. */
5034 static rtx
5035 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5036 rtx subtarget, optab op_optab)
5038 rtx op0;
5040 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5041 return NULL_RTX;
5043 /* Compute the argument. */
5044 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget, VOIDmode, 0);
5045 /* Compute op, into TARGET if possible.
5046 Set TARGET to wherever the result comes back. */
5047 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5048 op_optab, op0, target, 1);
5049 gcc_assert (target);
5051 return convert_to_mode (target_mode, target, 0);
5054 /* If the string passed to fputs is a constant and is one character
5055 long, we attempt to transform this call into __builtin_fputc(). */
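/* Illustration (STREAM is a hypothetical FILE pointer):
   fputs ("x", stream) can become fputc ('x', stream); fold_builtin_fputs
   may also rewrite longer constant strings (e.g. into an fwrite call)
   when the result is unused.  */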
5057 static rtx
5058 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5060 /* Verify the arguments in the original call. */
5061 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5063 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5064 CALL_EXPR_ARG (exp, 1),
5065 (target == const0_rtx),
5066 unlocked, NULL_TREE);
5067 if (result)
5068 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5070 return NULL_RTX;
5073 /* Expand a call to __builtin_expect. We just return our argument
5074 as the builtin_expect semantics should already have been handled by
5075 the tree branch prediction pass. */
5077 static rtx
5078 expand_builtin_expect (tree exp, rtx target)
5080 tree arg, c;
5082 if (call_expr_nargs (exp) < 2)
5083 return const0_rtx;
5084 arg = CALL_EXPR_ARG (exp, 0);
5085 c = CALL_EXPR_ARG (exp, 1);
5087 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5088 /* When guessing was done, the hints should be already stripped away. */
5089 gcc_assert (!flag_guess_branch_prob);
5090 return target;
5093 void
5094 expand_builtin_trap (void)
5096 #ifdef HAVE_trap
5097 if (HAVE_trap)
5098 emit_insn (gen_trap ());
5099 else
5100 #endif
5101 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5102 emit_barrier ();
5105 /* Expand EXP, a call to fabs, fabsf or fabsl.
5106 Return NULL_RTX if a normal call should be emitted rather than expanding
5107 the function inline. If convenient, the result should be placed
5108 in TARGET. SUBTARGET may be used as the target for computing
5109 the operand. */
5111 static rtx
5112 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5114 enum machine_mode mode;
5115 tree arg;
5116 rtx op0;
5118 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5119 return NULL_RTX;
5121 arg = CALL_EXPR_ARG (exp, 0);
5122 mode = TYPE_MODE (TREE_TYPE (arg));
5123 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
5124 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5127 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5128 Return NULL_RTX if a normal call should be emitted rather than expanding the
5129 function inline. If convenient, the result should be placed in TARGET.
5130 SUBTARGET may be used as the target for computing the operand. */
5132 static rtx
5133 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5135 rtx op0, op1;
5136 tree arg;
5138 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5139 return NULL_RTX;
5141 arg = CALL_EXPR_ARG (exp, 0);
5142 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5144 arg = CALL_EXPR_ARG (exp, 1);
5145 op1 = expand_normal (arg);
5147 return expand_copysign (op0, op1, target);
5150 /* Create a new constant string literal and return a char* pointer to it.
5151 The STRING_CST value is the LEN characters at STR. */
5152 tree
5153 build_string_literal (int len, const char *str)
5155 tree t, elem, index, type;
5157 t = build_string (len, str);
5158 elem = build_type_variant (char_type_node, 1, 0);
5159 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5160 type = build_array_type (elem, index);
5161 TREE_TYPE (t) = type;
5162 TREE_CONSTANT (t) = 1;
5163 TREE_INVARIANT (t) = 1;
5164 TREE_READONLY (t) = 1;
5165 TREE_STATIC (t) = 1;
5167 type = build_pointer_type (type);
5168 t = build1 (ADDR_EXPR, type, t);
5170 type = build_pointer_type (elem);
5171 t = build1 (NOP_EXPR, type, t);
5172 return t;
5175 /* Expand EXP, a call to printf or printf_unlocked.
5176 Return NULL_RTX if a normal call should be emitted rather than transforming
5177 the function inline. If convenient, the result should be placed in
5178 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5179 call. */
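/* Sketch of the rewrites attempted below (S and ARG are hypothetical):

       printf ("%s\n", s)   becomes  puts (s)
       printf ("%c", arg)   becomes  putchar (arg)
       printf ("")          becomes  nothing at all
       printf ("x")         becomes  putchar ('x')
       printf ("hi\n")      becomes  puts ("hi")

   and all of them apply only when the printf value is unused.  */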
5180 static rtx
5181 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5182 bool unlocked)
5184 /* If we're using an unlocked function, assume the other unlocked
5185 functions exist explicitly. */
5186 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5187 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5188 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5189 : implicit_built_in_decls[BUILT_IN_PUTS];
5190 const char *fmt_str;
5191 tree fn = 0;
5192 tree fmt, arg;
5193 int nargs = call_expr_nargs (exp);
5195 /* If the return value is used, don't do the transformation. */
5196 if (target != const0_rtx)
5197 return NULL_RTX;
5199 /* Verify the required arguments in the original call. */
5200 if (nargs == 0)
5201 return NULL_RTX;
5202 fmt = CALL_EXPR_ARG (exp, 0);
5203 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5204 return NULL_RTX;
5206 /* Check whether the format is a literal string constant. */
5207 fmt_str = c_getstr (fmt);
5208 if (fmt_str == NULL)
5209 return NULL_RTX;
5211 if (!init_target_chars ())
5212 return NULL_RTX;
5214 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5215 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5217 if ((nargs != 2)
5218 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5219 return NULL_RTX;
5220 if (fn_puts)
5221 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5223 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5224 else if (strcmp (fmt_str, target_percent_c) == 0)
5226 if ((nargs != 2)
5227 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5228 return NULL_RTX;
5229 if (fn_putchar)
5230 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5232 else
5234 /* We can't handle anything else with % args or %% ... yet. */
5235 if (strchr (fmt_str, target_percent))
5236 return NULL_RTX;
5238 if (nargs > 1)
5239 return NULL_RTX;
5241 /* If the format specifier was "", printf does nothing. */
5242 if (fmt_str[0] == '\0')
5243 return const0_rtx;
5244 /* If the format specifier has length of 1, call putchar. */
5245 if (fmt_str[1] == '\0')
5247 /* Given printf("c"), (where c is any one character,)
5248 convert "c"[0] to an int and pass that to the replacement
5249 function. */
5250 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5251 if (fn_putchar)
5252 fn = build_call_expr (fn_putchar, 1, arg);
5254 else
5256 /* If the format specifier was "string\n", call puts("string"). */
5257 size_t len = strlen (fmt_str);
5258 if ((unsigned char)fmt_str[len - 1] == target_newline)
5260 /* Create a NUL-terminated string that's one char shorter
5261 than the original, stripping off the trailing '\n'. */
5262 char *newstr = alloca (len);
5263 memcpy (newstr, fmt_str, len - 1);
5264 newstr[len - 1] = 0;
5265 arg = build_string_literal (len, newstr);
5266 if (fn_puts)
5267 fn = build_call_expr (fn_puts, 1, arg);
5269 else
5270 /* We'd like to arrange to call fputs(string,stdout) here,
5271 but we need stdout and don't have a way to get it yet. */
5272 return NULL_RTX;
5276 if (!fn)
5277 return NULL_RTX;
5278 if (TREE_CODE (fn) == CALL_EXPR)
5279 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5280 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5283 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5284 Return NULL_RTX if a normal call should be emitted rather than transforming
5285 the function inline. If convenient, the result should be placed in
5286 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5287 call. */
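/* Sketch of the rewrites attempted below (FP, S and ARG hypothetical):

       fprintf (fp, "%s", s)    becomes  fputs (s, fp)
       fprintf (fp, "%c", arg)  becomes  fputc (arg, fp)
       fprintf (fp, "")         evaluates FP for side effects only
       fprintf (fp, "hi")       becomes  fputs ("hi", fp)

   again only when the fprintf value is unused.  */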
5288 static rtx
5289 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5290 bool unlocked)
5292 /* If we're using an unlocked function, assume the other unlocked
5293 functions exist explicitly. */
5294 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5295 : implicit_built_in_decls[BUILT_IN_FPUTC];
5296 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5297 : implicit_built_in_decls[BUILT_IN_FPUTS];
5298 const char *fmt_str;
5299 tree fn = 0;
5300 tree fmt, fp, arg;
5301 int nargs = call_expr_nargs (exp);
5303 /* If the return value is used, don't do the transformation. */
5304 if (target != const0_rtx)
5305 return NULL_RTX;
5307 /* Verify the required arguments in the original call. */
5308 if (nargs < 2)
5309 return NULL_RTX;
5310 fp = CALL_EXPR_ARG (exp, 0);
5311 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5312 return NULL_RTX;
5313 fmt = CALL_EXPR_ARG (exp, 1);
5314 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5315 return NULL_RTX;
5317 /* Check whether the format is a literal string constant. */
5318 fmt_str = c_getstr (fmt);
5319 if (fmt_str == NULL)
5320 return NULL_RTX;
5322 if (!init_target_chars ())
5323 return NULL_RTX;
5325 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5326 if (strcmp (fmt_str, target_percent_s) == 0)
5328 if ((nargs != 3)
5329 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5330 return NULL_RTX;
5331 arg = CALL_EXPR_ARG (exp, 2);
5332 if (fn_fputs)
5333 fn = build_call_expr (fn_fputs, 2, arg, fp);
5335 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5336 else if (strcmp (fmt_str, target_percent_c) == 0)
5338 if ((nargs != 3)
5339 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5340 return NULL_RTX;
5341 arg = CALL_EXPR_ARG (exp, 2);
5342 if (fn_fputc)
5343 fn = build_call_expr (fn_fputc, 2, arg, fp);
5345 else
5347 /* We can't handle anything else with % args or %% ... yet. */
5348 if (strchr (fmt_str, target_percent))
5349 return NULL_RTX;
5351 if (nargs > 2)
5352 return NULL_RTX;
5354 /* If the format specifier was "", fprintf does nothing. */
5355 if (fmt_str[0] == '\0')
5357 /* Evaluate and ignore FILE* argument for side-effects. */
5358 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5359 return const0_rtx;
5362 /* When "string" doesn't contain %, replace all cases of
5363 fprintf(stream,string) with fputs(string,stream). The fputs
5364 builtin will take care of special cases like length == 1. */
5365 if (fn_fputs)
5366 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5369 if (!fn)
5370 return NULL_RTX;
5371 if (TREE_CODE (fn) == CALL_EXPR)
5372 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5373 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5376 /* Expand a call EXP to sprintf. Return NULL_RTX if
5377 a normal call should be emitted rather than expanding the function
5378 inline. If convenient, the result should be placed in TARGET with
5379 mode MODE. */
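/* For example, sprintf (buf, "hi") becomes strcpy (buf, "hi") with a
   result of 2, and sprintf (buf, "%s", str) becomes strcpy (buf, str),
   where the length is computed only if the sprintf result is used.  */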
5381 static rtx
5382 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5384 tree dest, fmt;
5385 const char *fmt_str;
5386 int nargs = call_expr_nargs (exp);
5388 /* Verify the required arguments in the original call. */
5389 if (nargs < 2)
5390 return NULL_RTX;
5391 dest = CALL_EXPR_ARG (exp, 0);
5392 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5393 return NULL_RTX;
5394 fmt = CALL_EXPR_ARG (exp, 1);
5395 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5396 return NULL_RTX;
5398 /* Check whether the format is a literal string constant. */
5399 fmt_str = c_getstr (fmt);
5400 if (fmt_str == NULL)
5401 return NULL_RTX;
5403 if (!init_target_chars ())
5404 return NULL_RTX;
5406 /* If the format doesn't contain % args or %%, use strcpy. */
5407 if (strchr (fmt_str, target_percent) == 0)
5409 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5410 tree exp;
5412 if ((nargs > 2) || ! fn)
5413 return NULL_RTX;
5414 expand_expr (build_call_expr (fn, 2, dest, fmt),
5415 const0_rtx, VOIDmode, EXPAND_NORMAL);
5416 if (target == const0_rtx)
5417 return const0_rtx;
5418 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5419 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5421 /* If the format is "%s", use strcpy if the result isn't used. */
5422 else if (strcmp (fmt_str, target_percent_s) == 0)
5424 tree fn, arg, len;
5425 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5427 if (! fn)
5428 return NULL_RTX;
5429 if (nargs != 3)
5430 return NULL_RTX;
5431 arg = CALL_EXPR_ARG (exp, 2);
5432 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5433 return NULL_RTX;
5435 if (target != const0_rtx)
5437 len = c_strlen (arg, 1);
5438 if (! len || TREE_CODE (len) != INTEGER_CST)
5439 return NULL_RTX;
5441 else
5442 len = NULL_TREE;
5444 expand_expr (build_call_expr (fn, 2, dest, arg),
5445 const0_rtx, VOIDmode, EXPAND_NORMAL);
5447 if (target == const0_rtx)
5448 return const0_rtx;
5449 return expand_expr (len, target, mode, EXPAND_NORMAL);
5452 return NULL_RTX;
5455 /* Expand a call to either the entry or exit function profiler. */
5457 static rtx
5458 expand_builtin_profile_func (bool exitp)
5460 rtx this, which;
5462 this = DECL_RTL (current_function_decl);
5463 gcc_assert (MEM_P (this));
5464 this = XEXP (this, 0);
5466 if (exitp)
5467 which = profile_function_exit_libfunc;
5468 else
5469 which = profile_function_entry_libfunc;
5471 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5472 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5474 Pmode);
5476 return const0_rtx;
5479 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
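/* The arithmetic below rounds the address up as
     tramp = (tramp + align - 1) & -align
   with align being TRAMPOLINE_ALIGNMENT expressed in bytes.  */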
5481 static rtx
5482 round_trampoline_addr (rtx tramp)
5484 rtx temp, addend, mask;
5486 /* If we don't need too much alignment, we'll have been guaranteed
5487 proper alignment by get_trampoline_type. */
5488 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5489 return tramp;
5491 /* Round address up to desired boundary. */
5492 temp = gen_reg_rtx (Pmode);
5493 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5494 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5496 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5497 temp, 0, OPTAB_LIB_WIDEN);
5498 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5499 temp, 0, OPTAB_LIB_WIDEN);
5501 return tramp;
5504 static rtx
5505 expand_builtin_init_trampoline (tree exp)
5507 tree t_tramp, t_func, t_chain;
5508 rtx r_tramp, r_func, r_chain;
5509 #ifdef TRAMPOLINE_TEMPLATE
5510 rtx blktramp;
5511 #endif
5513 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5514 POINTER_TYPE, VOID_TYPE))
5515 return NULL_RTX;
5517 t_tramp = CALL_EXPR_ARG (exp, 0);
5518 t_func = CALL_EXPR_ARG (exp, 1);
5519 t_chain = CALL_EXPR_ARG (exp, 2);
5521 r_tramp = expand_normal (t_tramp);
5522 r_func = expand_normal (t_func);
5523 r_chain = expand_normal (t_chain);
5525 /* Generate insns to initialize the trampoline. */
5526 r_tramp = round_trampoline_addr (r_tramp);
5527 #ifdef TRAMPOLINE_TEMPLATE
5528 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5529 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5530 emit_block_move (blktramp, assemble_trampoline_template (),
5531 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5532 #endif
5533 trampolines_created = 1;
5534 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5536 return const0_rtx;
5539 static rtx
5540 expand_builtin_adjust_trampoline (tree exp)
5542 rtx tramp;
5544 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5545 return NULL_RTX;
5547 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5548 tramp = round_trampoline_addr (tramp);
5549 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5550 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5551 #endif
5553 return tramp;
5556 /* Expand a call to the built-in signbit, signbitf or signbitl function.
5557 Return NULL_RTX if a normal call should be emitted rather than expanding
5558 the function in-line. EXP is the expression that is a call to the builtin
5559 function; if convenient, the result should be placed in TARGET. */
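/* As an illustration, for IEEE single precision the sign occupies bit
   31 of the representation, so the expansion below computes either
   bits & (1 << 31) when that bit fits in the result mode, or
   (bits >> 31) & 1 otherwise, where "bits" is the integer view of
   the argument.  */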
5561 static rtx
5562 expand_builtin_signbit (tree exp, rtx target)
5564 const struct real_format *fmt;
5565 enum machine_mode fmode, imode, rmode;
5566 HOST_WIDE_INT hi, lo;
5567 tree arg;
5568 int word, bitpos;
5569 rtx temp;
5571 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5572 return NULL_RTX;
5574 arg = CALL_EXPR_ARG (exp, 0);
5575 fmode = TYPE_MODE (TREE_TYPE (arg));
5576 rmode = TYPE_MODE (TREE_TYPE (exp));
5577 fmt = REAL_MODE_FORMAT (fmode);
5579 /* For floating point formats without a sign bit, implement signbit
5580 as "ARG < 0.0". */
5581 bitpos = fmt->signbit_ro;
5582 if (bitpos < 0)
5584 /* But we can't do this if the format supports signed zero. */
5585 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5586 return NULL_RTX;
5588 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5589 build_real (TREE_TYPE (arg), dconst0));
5590 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5593 temp = expand_normal (arg);
5594 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5596 imode = int_mode_for_mode (fmode);
5597 if (imode == BLKmode)
5598 return NULL_RTX;
5599 temp = gen_lowpart (imode, temp);
5601 else
5603 imode = word_mode;
5604 /* Handle targets with different FP word orders. */
5605 if (FLOAT_WORDS_BIG_ENDIAN)
5606 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5607 else
5608 word = bitpos / BITS_PER_WORD;
5609 temp = operand_subword_force (temp, word, fmode);
5610 bitpos = bitpos % BITS_PER_WORD;
5613 /* Force the intermediate word_mode (or narrower) result into a
5614 register. This avoids attempting to create paradoxical SUBREGs
5615 of floating point modes below. */
5616 temp = force_reg (imode, temp);
5618 /* If the bitpos is within the "result mode" lowpart, the operation
5619 can be implemented with a single bitwise AND. Otherwise, we need
5620 a right shift and an AND. */
5622 if (bitpos < GET_MODE_BITSIZE (rmode))
5624 if (bitpos < HOST_BITS_PER_WIDE_INT)
5626 hi = 0;
5627 lo = (HOST_WIDE_INT) 1 << bitpos;
5629 else
5631 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5632 lo = 0;
5635 if (imode != rmode)
5636 temp = gen_lowpart (rmode, temp);
5637 temp = expand_binop (rmode, and_optab, temp,
5638 immed_double_const (lo, hi, rmode),
5639 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5641 else
5643 /* Perform a logical right shift to place the signbit in the least
5644 significant bit, then truncate the result to the desired mode
5645 and mask just this bit. */
5646 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5647 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5648 temp = gen_lowpart (rmode, temp);
5649 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5650 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5653 return temp;
5656 /* Expand fork or exec calls. TARGET is the desired target of the
5657 call. EXP is the call. FN is the
5658 FUNCTION_DECL that identifies the actual function. IGNORE is nonzero if the
5659 value is to be ignored. */
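/* For instance, when profiling with -fprofile-arcs a call to fork ()
   is redirected to __gcov_fork () and execl () to __gcov_execl (),
   as selected by the switch below.  */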
5661 static rtx
5662 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5664 tree id, decl;
5665 tree call;
5667 /* If we are not profiling, just call the function. */
5668 if (!profile_arc_flag)
5669 return NULL_RTX;
5671 /* Otherwise call the wrapper. This should be equivalent for the rest of
5672 the compiler, so the code does not diverge, and the wrapper may run the
5673 code necessary for keeping the profiling sane. */
5675 switch (DECL_FUNCTION_CODE (fn))
5677 case BUILT_IN_FORK:
5678 id = get_identifier ("__gcov_fork");
5679 break;
5681 case BUILT_IN_EXECL:
5682 id = get_identifier ("__gcov_execl");
5683 break;
5685 case BUILT_IN_EXECV:
5686 id = get_identifier ("__gcov_execv");
5687 break;
5689 case BUILT_IN_EXECLP:
5690 id = get_identifier ("__gcov_execlp");
5691 break;
5693 case BUILT_IN_EXECLE:
5694 id = get_identifier ("__gcov_execle");
5695 break;
5697 case BUILT_IN_EXECVP:
5698 id = get_identifier ("__gcov_execvp");
5699 break;
5701 case BUILT_IN_EXECVE:
5702 id = get_identifier ("__gcov_execve");
5703 break;
5705 default:
5706 gcc_unreachable ();
5709 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5710 DECL_EXTERNAL (decl) = 1;
5711 TREE_PUBLIC (decl) = 1;
5712 DECL_ARTIFICIAL (decl) = 1;
5713 TREE_NOTHROW (decl) = 1;
5714 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5715 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5716 call = rewrite_call_expr (exp, 0, decl, 0);
5717 return expand_call (call, target, ignore);
5722 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5723 the pointer in these functions is void*, the tree optimizers may remove
5724 casts. The mode computed in expand_builtin isn't reliable either, due
5725 to __sync_bool_compare_and_swap.
5727 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5728 group of builtins. This gives us log2 of the mode size. */
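/* For example, a FCODE_DIFF of 2 (the ..._4 variants) requests a mode
   of BITS_PER_UNIT << 2 bits, i.e. SImode on typical 8-bit-unit
   targets.  */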
5730 static inline enum machine_mode
5731 get_builtin_sync_mode (int fcode_diff)
5733 /* The size is not negotiable, so ask not to get BLKmode in return
5734 if the target indicates that a smaller size would be better. */
5735 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5738 /* Expand the memory expression LOC and return the appropriate memory operand
5739 for the builtin_sync operations. */
5741 static rtx
5742 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5744 rtx addr, mem;
5746 addr = expand_expr (loc, NULL, Pmode, EXPAND_SUM);
5748 /* Note that we explicitly do not want any alias information for this
5749 memory, so that we kill all other live memories. Otherwise we don't
5750 satisfy the full barrier semantics of the intrinsic. */
5751 mem = validize_mem (gen_rtx_MEM (mode, addr));
5753 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5754 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5755 MEM_VOLATILE_P (mem) = 1;
5757 return mem;
5760 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5761 EXP is the CALL_EXPR. CODE is the rtx code
5762 that corresponds to the arithmetic or logical operation from the name;
5763 an exception here is that NOT actually means NAND. TARGET is an optional
5764 place for us to store the results; AFTER is true if this is the
5765 fetch_and_xxx form. IGNORE is true if we don't actually care about
5766 the result of the operation at all. */
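/* For example, __sync_fetch_and_add expands with CODE == PLUS and
   AFTER == false, __sync_add_and_fetch with AFTER == true, and the
   NAND variants pass CODE == NOT as described above.  */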
5768 static rtx
5769 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5770 enum rtx_code code, bool after,
5771 rtx target, bool ignore)
5773 rtx val, mem;
5774 enum machine_mode old_mode;
5776 /* Expand the operands. */
5777 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5779 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL, mode, EXPAND_NORMAL);
5780 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5781 of CONST_INTs, where we know the old_mode only from the call argument. */
5782 old_mode = GET_MODE (val);
5783 if (old_mode == VOIDmode)
5784 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5785 val = convert_modes (mode, old_mode, val, 1);
5787 if (ignore)
5788 return expand_sync_operation (mem, val, code);
5789 else
5790 return expand_sync_fetch_operation (mem, val, code, after, target);
5793 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5794 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5795 true if this is the boolean form. TARGET is a place for us to store the
5796 results; this is NOT optional if IS_BOOL is true. */
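/* For example, __sync_val_compare_and_swap (p, o, n) expands with
   IS_BOOL false and yields the previous value of *p, while
   __sync_bool_compare_and_swap expands with IS_BOOL true and yields
   only the success flag.  */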
5798 static rtx
5799 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5800 bool is_bool, rtx target)
5802 rtx old_val, new_val, mem;
5803 enum machine_mode old_mode;
5805 /* Expand the operands. */
5806 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5809 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL, mode, EXPAND_NORMAL);
5810 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5811 of CONST_INTs, where we know the old_mode only from the call argument. */
5812 old_mode = GET_MODE (old_val);
5813 if (old_mode == VOIDmode)
5814 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5815 old_val = convert_modes (mode, old_mode, old_val, 1);
5817 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL, mode, EXPAND_NORMAL);
5818 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5819 of CONST_INTs, where we know the old_mode only from the call argument. */
5820 old_mode = GET_MODE (new_val);
5821 if (old_mode == VOIDmode)
5822 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5823 new_val = convert_modes (mode, old_mode, new_val, 1);
5825 if (is_bool)
5826 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5827 else
5828 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5831 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5832 general form is actually an atomic exchange, and some targets only
5833 support a reduced form with the second argument being a constant 1.
5834 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5835 the results. */
5837 static rtx
5838 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5839 rtx target)
5841 rtx val, mem;
5842 enum machine_mode old_mode;
5844 /* Expand the operands. */
5845 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5846 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL, mode, EXPAND_NORMAL);
5847 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5848 of CONST_INTs, where we know the old_mode only from the call argument. */
5849 old_mode = GET_MODE (val);
5850 if (old_mode == VOIDmode)
5851 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5852 val = convert_modes (mode, old_mode, val, 1);
5854 return expand_sync_lock_test_and_set (mem, val, target);
5857 /* Expand the __sync_synchronize intrinsic. */
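/* This emits the target's memory_barrier pattern when one is defined,
   and otherwise falls back to an empty volatile asm with a "memory"
   clobber, which prevents the compiler from moving memory accesses
   across this point.  */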
5859 static void
5860 expand_builtin_synchronize (void)
5862 tree x;
5864 #ifdef HAVE_memory_barrier
5865 if (HAVE_memory_barrier)
5867 emit_insn (gen_memory_barrier ());
5868 return;
5870 #endif
5872 /* If no explicit memory barrier instruction is available, create an
5873 empty asm stmt with a memory clobber. */
5874 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
5875 tree_cons (NULL, build_string (6, "memory"), NULL));
5876 ASM_VOLATILE_P (x) = 1;
5877 expand_asm_expr (x);
5880 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5882 static void
5883 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5885 enum insn_code icode;
5886 rtx mem, insn;
5887 rtx val = const0_rtx;
5889 /* Expand the operands. */
5890 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5892 /* If there is an explicit operation in the md file, use it. */
5893 icode = sync_lock_release[mode];
5894 if (icode != CODE_FOR_nothing)
5896 if (!insn_data[icode].operand[1].predicate (val, mode))
5897 val = force_reg (mode, val);
5899 insn = GEN_FCN (icode) (mem, val);
5900 if (insn)
5902 emit_insn (insn);
5903 return;
5907 /* Otherwise we can implement this operation by emitting a barrier
5908 followed by a store of zero. */
5909 expand_builtin_synchronize ();
5910 emit_move_insn (mem, val);
5913 /* Expand an expression EXP that calls a built-in function,
5914 with result going to TARGET if that's convenient
5915 (and in mode MODE if that's convenient).
5916 SUBTARGET may be used as the target for computing one of EXP's operands.
5917 IGNORE is nonzero if the value is to be ignored. */
5920 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5921 int ignore)
5923 tree fndecl = get_callee_fndecl (exp);
5924 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5925 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5927 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5928 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5930 /* When not optimizing, generate calls to library functions for a certain
5931 set of builtins. */
5932 if (!optimize
5933 && !called_as_built_in (fndecl)
5934 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5935 && fcode != BUILT_IN_ALLOCA)
5936 return expand_call (exp, target, ignore);
5938 /* The built-in function expanders test for target == const0_rtx
5939 to determine whether the function's result will be ignored. */
5940 if (ignore)
5941 target = const0_rtx;
5943 /* If the result of a pure or const built-in function is ignored, and
5944 none of its arguments are volatile, we can avoid expanding the
5945 built-in call and just evaluate the arguments for side-effects. */
5946 if (target == const0_rtx
5947 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
5949 bool volatilep = false;
5950 tree arg;
5951 call_expr_arg_iterator iter;
5953 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5954 if (TREE_THIS_VOLATILE (arg))
5956 volatilep = true;
5957 break;
5960 if (! volatilep)
5962 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5963 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5964 return const0_rtx;
5968 switch (fcode)
5970 CASE_FLT_FN (BUILT_IN_FABS):
5971 target = expand_builtin_fabs (exp, target, subtarget);
5972 if (target)
5973 return target;
5974 break;
5976 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5977 target = expand_builtin_copysign (exp, target, subtarget);
5978 if (target)
5979 return target;
5980 break;
5982 /* Just do a normal library call if we were unable to fold
5983 the values. */
5984 CASE_FLT_FN (BUILT_IN_CABS):
5985 break;
5987 CASE_FLT_FN (BUILT_IN_EXP):
5988 CASE_FLT_FN (BUILT_IN_EXP10):
5989 CASE_FLT_FN (BUILT_IN_POW10):
5990 CASE_FLT_FN (BUILT_IN_EXP2):
5991 CASE_FLT_FN (BUILT_IN_EXPM1):
5992 CASE_FLT_FN (BUILT_IN_LOGB):
5993 CASE_FLT_FN (BUILT_IN_LOG):
5994 CASE_FLT_FN (BUILT_IN_LOG10):
5995 CASE_FLT_FN (BUILT_IN_LOG2):
5996 CASE_FLT_FN (BUILT_IN_LOG1P):
5997 CASE_FLT_FN (BUILT_IN_TAN):
5998 CASE_FLT_FN (BUILT_IN_ASIN):
5999 CASE_FLT_FN (BUILT_IN_ACOS):
6000 CASE_FLT_FN (BUILT_IN_ATAN):
6001 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6002 because of possible accuracy problems. */
6003 if (! flag_unsafe_math_optimizations)
6004 break;
6005 CASE_FLT_FN (BUILT_IN_SQRT):
6006 CASE_FLT_FN (BUILT_IN_FLOOR):
6007 CASE_FLT_FN (BUILT_IN_CEIL):
6008 CASE_FLT_FN (BUILT_IN_TRUNC):
6009 CASE_FLT_FN (BUILT_IN_ROUND):
6010 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6011 CASE_FLT_FN (BUILT_IN_RINT):
6012 target = expand_builtin_mathfn (exp, target, subtarget);
6013 if (target)
6014 return target;
6015 break;
6017 CASE_FLT_FN (BUILT_IN_ILOGB):
6018 if (! flag_unsafe_math_optimizations)
6019 break;
6020 CASE_FLT_FN (BUILT_IN_ISINF):
6021 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6022 if (target)
6023 return target;
6024 break;
6026 CASE_FLT_FN (BUILT_IN_LCEIL):
6027 CASE_FLT_FN (BUILT_IN_LLCEIL):
6028 CASE_FLT_FN (BUILT_IN_LFLOOR):
6029 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6030 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6031 if (target)
6032 return target;
6033 break;
6035 CASE_FLT_FN (BUILT_IN_LRINT):
6036 CASE_FLT_FN (BUILT_IN_LLRINT):
6037 CASE_FLT_FN (BUILT_IN_LROUND):
6038 CASE_FLT_FN (BUILT_IN_LLROUND):
6039 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6040 if (target)
6041 return target;
6042 break;
6044 CASE_FLT_FN (BUILT_IN_POW):
6045 target = expand_builtin_pow (exp, target, subtarget);
6046 if (target)
6047 return target;
6048 break;
6050 CASE_FLT_FN (BUILT_IN_POWI):
6051 target = expand_builtin_powi (exp, target, subtarget);
6052 if (target)
6053 return target;
6054 break;
6056 CASE_FLT_FN (BUILT_IN_ATAN2):
6057 CASE_FLT_FN (BUILT_IN_LDEXP):
6058 CASE_FLT_FN (BUILT_IN_SCALB):
6059 CASE_FLT_FN (BUILT_IN_SCALBN):
6060 CASE_FLT_FN (BUILT_IN_SCALBLN):
6061 if (! flag_unsafe_math_optimizations)
6062 break;
6064 CASE_FLT_FN (BUILT_IN_FMOD):
6065 CASE_FLT_FN (BUILT_IN_REMAINDER):
6066 CASE_FLT_FN (BUILT_IN_DREM):
6067 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6068 if (target)
6069 return target;
6070 break;
6072 CASE_FLT_FN (BUILT_IN_CEXPI):
6073 target = expand_builtin_cexpi (exp, target, subtarget);
6074 gcc_assert (target);
6075 return target;
6077 CASE_FLT_FN (BUILT_IN_SIN):
6078 CASE_FLT_FN (BUILT_IN_COS):
6079 if (! flag_unsafe_math_optimizations)
6080 break;
6081 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6082 if (target)
6083 return target;
6084 break;
6086 CASE_FLT_FN (BUILT_IN_SINCOS):
6087 if (! flag_unsafe_math_optimizations)
6088 break;
6089 target = expand_builtin_sincos (exp);
6090 if (target)
6091 return target;
6092 break;
6094 case BUILT_IN_APPLY_ARGS:
6095 return expand_builtin_apply_args ();
6097 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6098 FUNCTION with a copy of the parameters described by
6099 ARGUMENTS, and ARGSIZE. It returns a block of memory
6100 allocated on the stack into which is stored all the registers
6101 that might possibly be used for returning the result of a
6102 function. ARGUMENTS is the value returned by
6103 __builtin_apply_args. ARGSIZE is the number of bytes of
6104 arguments that must be copied. ??? How should this value be
6105 computed? We'll also need a safe worst case value for varargs
6106 functions. */
6107 case BUILT_IN_APPLY:
6108 if (!validate_arglist (exp, POINTER_TYPE,
6109 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6110 && !validate_arglist (exp, REFERENCE_TYPE,
6111 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6112 return const0_rtx;
6113 else
6115 rtx ops[3];
6117 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6118 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6119 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6121 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6124 /* __builtin_return (RESULT) causes the function to return the
6125 value described by RESULT. RESULT is the address of the block of
6126 memory returned by __builtin_apply. */
6127 case BUILT_IN_RETURN:
6128 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6129 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6130 return const0_rtx;
6132 case BUILT_IN_SAVEREGS:
6133 return expand_builtin_saveregs ();
6135 case BUILT_IN_ARGS_INFO:
6136 return expand_builtin_args_info (exp);
6138 /* Return the address of the first anonymous stack arg. */
6139 case BUILT_IN_NEXT_ARG:
6140 if (fold_builtin_next_arg (exp, false))
6141 return const0_rtx;
6142 return expand_builtin_next_arg ();
6144 case BUILT_IN_CLASSIFY_TYPE:
6145 return expand_builtin_classify_type (exp);
6147 case BUILT_IN_CONSTANT_P:
6148 return const0_rtx;
6150 case BUILT_IN_FRAME_ADDRESS:
6151 case BUILT_IN_RETURN_ADDRESS:
6152 return expand_builtin_frame_address (fndecl, exp);
6154 /* Returns the address of the area where the structure is returned.
6155 0 otherwise. */
6156 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6157 if (call_expr_nargs (exp) != 0
6158 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6159 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6160 return const0_rtx;
6161 else
6162 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6164 case BUILT_IN_ALLOCA:
6165 target = expand_builtin_alloca (exp, target);
6166 if (target)
6167 return target;
6168 break;
6170 case BUILT_IN_STACK_SAVE:
6171 return expand_stack_save ();
6173 case BUILT_IN_STACK_RESTORE:
6174 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6175 return const0_rtx;
6177 case BUILT_IN_BSWAP32:
6178 case BUILT_IN_BSWAP64:
6179 target = expand_builtin_bswap (exp, target, subtarget);
6181 if (target)
6182 return target;
6183 break;
6185 CASE_INT_FN (BUILT_IN_FFS):
6186 case BUILT_IN_FFSIMAX:
6187 target = expand_builtin_unop (target_mode, exp, target,
6188 subtarget, ffs_optab);
6189 if (target)
6190 return target;
6191 break;
6193 CASE_INT_FN (BUILT_IN_CLZ):
6194 case BUILT_IN_CLZIMAX:
6195 target = expand_builtin_unop (target_mode, exp, target,
6196 subtarget, clz_optab);
6197 if (target)
6198 return target;
6199 break;
6201 CASE_INT_FN (BUILT_IN_CTZ):
6202 case BUILT_IN_CTZIMAX:
6203 target = expand_builtin_unop (target_mode, exp, target,
6204 subtarget, ctz_optab);
6205 if (target)
6206 return target;
6207 break;
6209 CASE_INT_FN (BUILT_IN_POPCOUNT):
6210 case BUILT_IN_POPCOUNTIMAX:
6211 target = expand_builtin_unop (target_mode, exp, target,
6212 subtarget, popcount_optab);
6213 if (target)
6214 return target;
6215 break;
6217 CASE_INT_FN (BUILT_IN_PARITY):
6218 case BUILT_IN_PARITYIMAX:
6219 target = expand_builtin_unop (target_mode, exp, target,
6220 subtarget, parity_optab);
6221 if (target)
6222 return target;
6223 break;
6225 case BUILT_IN_STRLEN:
6226 target = expand_builtin_strlen (exp, target, target_mode);
6227 if (target)
6228 return target;
6229 break;
6231 case BUILT_IN_STRCPY:
6232 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6233 if (target)
6234 return target;
6235 break;
6237 case BUILT_IN_STRNCPY:
6238 target = expand_builtin_strncpy (exp, target, mode);
6239 if (target)
6240 return target;
6241 break;
6243 case BUILT_IN_STPCPY:
6244 target = expand_builtin_stpcpy (exp, target, mode);
6245 if (target)
6246 return target;
6247 break;
6249 case BUILT_IN_STRCAT:
6250 target = expand_builtin_strcat (fndecl, exp, target, mode);
6251 if (target)
6252 return target;
6253 break;
6255 case BUILT_IN_STRNCAT:
6256 target = expand_builtin_strncat (exp, target, mode);
6257 if (target)
6258 return target;
6259 break;
6261 case BUILT_IN_STRSPN:
6262 target = expand_builtin_strspn (exp, target, mode);
6263 if (target)
6264 return target;
6265 break;
6267 case BUILT_IN_STRCSPN:
6268 target = expand_builtin_strcspn (exp, target, mode);
6269 if (target)
6270 return target;
6271 break;
6273 case BUILT_IN_STRSTR:
6274 target = expand_builtin_strstr (exp, target, mode);
6275 if (target)
6276 return target;
6277 break;
6279 case BUILT_IN_STRPBRK:
6280 target = expand_builtin_strpbrk (exp, target, mode);
6281 if (target)
6282 return target;
6283 break;
6285 case BUILT_IN_INDEX:
6286 case BUILT_IN_STRCHR:
6287 target = expand_builtin_strchr (exp, target, mode);
6288 if (target)
6289 return target;
6290 break;
6292 case BUILT_IN_RINDEX:
6293 case BUILT_IN_STRRCHR:
6294 target = expand_builtin_strrchr (exp, target, mode);
6295 if (target)
6296 return target;
6297 break;
6299 case BUILT_IN_MEMCPY:
6300 target = expand_builtin_memcpy (exp, target, mode);
6301 if (target)
6302 return target;
6303 break;
6305 case BUILT_IN_MEMPCPY:
6306 target = expand_builtin_mempcpy (exp, target, mode);
6307 if (target)
6308 return target;
6309 break;
6311 case BUILT_IN_MEMMOVE:
6312 target = expand_builtin_memmove (exp, target, mode, ignore);
6313 if (target)
6314 return target;
6315 break;
6317 case BUILT_IN_BCOPY:
6318 target = expand_builtin_bcopy (exp, ignore);
6319 if (target)
6320 return target;
6321 break;
6323 case BUILT_IN_MEMSET:
6324 target = expand_builtin_memset (exp, target, mode);
6325 if (target)
6326 return target;
6327 break;
6329 case BUILT_IN_BZERO:
6330 target = expand_builtin_bzero (exp);
6331 if (target)
6332 return target;
6333 break;
6335 case BUILT_IN_STRCMP:
6336 target = expand_builtin_strcmp (exp, target, mode);
6337 if (target)
6338 return target;
6339 break;
6341 case BUILT_IN_STRNCMP:
6342 target = expand_builtin_strncmp (exp, target, mode);
6343 if (target)
6344 return target;
6345 break;
6347 case BUILT_IN_BCMP:
6348 case BUILT_IN_MEMCMP:
6349 target = expand_builtin_memcmp (exp, target, mode);
6350 if (target)
6351 return target;
6352 break;
6354 case BUILT_IN_SETJMP:
6355 /* This should have been lowered to the builtins below. */
6356 gcc_unreachable ();
6358 case BUILT_IN_SETJMP_SETUP:
6359 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6360 and the receiver label. */
6361 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6363 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6364 VOIDmode, EXPAND_NORMAL);
6365 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6366 rtx label_r = label_rtx (label);
6368 /* This is copied from the handling of non-local gotos. */
6369 expand_builtin_setjmp_setup (buf_addr, label_r);
6370 nonlocal_goto_handler_labels
6371 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6372 nonlocal_goto_handler_labels);
6373 /* ??? Do not let expand_label treat us as such since we would
6374 not want to be both on the list of non-local labels and on
6375 the list of forced labels. */
6376 FORCED_LABEL (label) = 0;
6377 return const0_rtx;
6379 break;
6381 case BUILT_IN_SETJMP_DISPATCHER:
6382 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6383 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6385 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6386 rtx label_r = label_rtx (label);
6388 /* Remove the dispatcher label from the list of non-local labels
6389 since the receiver labels have been added to it above. */
6390 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6391 return const0_rtx;
6393 break;
6395 case BUILT_IN_SETJMP_RECEIVER:
6396 /* __builtin_setjmp_receiver is passed the receiver label. */
6397 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6399 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6400 rtx label_r = label_rtx (label);
6402 expand_builtin_setjmp_receiver (label_r);
6403 return const0_rtx;
6405 break;
6407 /* __builtin_longjmp is passed a pointer to an array of five words.
6408 It's similar to the C library longjmp function but works with
6409 __builtin_setjmp above. */
6410 case BUILT_IN_LONGJMP:
6411 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6413 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6414 VOIDmode, EXPAND_NORMAL);
6415 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6417 if (value != const1_rtx)
6419 error ("%<__builtin_longjmp%> second argument must be 1");
6420 return const0_rtx;
6423 expand_builtin_longjmp (buf_addr, value);
6424 return const0_rtx;
6426 break;
6428 case BUILT_IN_NONLOCAL_GOTO:
6429 target = expand_builtin_nonlocal_goto (exp);
6430 if (target)
6431 return target;
6432 break;
6434 /* This updates the setjmp buffer that is its argument with the value
6435 of the current stack pointer. */
6436 case BUILT_IN_UPDATE_SETJMP_BUF:
6437 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6439 rtx buf_addr
6440 = expand_normal (CALL_EXPR_ARG (exp, 0));
6442 expand_builtin_update_setjmp_buf (buf_addr);
6443 return const0_rtx;
6445 break;
6447 case BUILT_IN_TRAP:
6448 expand_builtin_trap ();
6449 return const0_rtx;
6451 case BUILT_IN_PRINTF:
6452 target = expand_builtin_printf (exp, target, mode, false);
6453 if (target)
6454 return target;
6455 break;
6457 case BUILT_IN_PRINTF_UNLOCKED:
6458 target = expand_builtin_printf (exp, target, mode, true);
6459 if (target)
6460 return target;
6461 break;
6463 case BUILT_IN_FPUTS:
6464 target = expand_builtin_fputs (exp, target, false);
6465 if (target)
6466 return target;
6467 break;
6468 case BUILT_IN_FPUTS_UNLOCKED:
6469 target = expand_builtin_fputs (exp, target, true);
6470 if (target)
6471 return target;
6472 break;
6474 case BUILT_IN_FPRINTF:
6475 target = expand_builtin_fprintf (exp, target, mode, false);
6476 if (target)
6477 return target;
6478 break;
6480 case BUILT_IN_FPRINTF_UNLOCKED:
6481 target = expand_builtin_fprintf (exp, target, mode, true);
6482 if (target)
6483 return target;
6484 break;
6486 case BUILT_IN_SPRINTF:
6487 target = expand_builtin_sprintf (exp, target, mode);
6488 if (target)
6489 return target;
6490 break;
6492 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6493 target = expand_builtin_signbit (exp, target);
6494 if (target)
6495 return target;
6496 break;
6498 /* Various hooks for the DWARF 2 __throw routine. */
6499 case BUILT_IN_UNWIND_INIT:
6500 expand_builtin_unwind_init ();
6501 return const0_rtx;
6502 case BUILT_IN_DWARF_CFA:
6503 return virtual_cfa_rtx;
6504 #ifdef DWARF2_UNWIND_INFO
6505 case BUILT_IN_DWARF_SP_COLUMN:
6506 return expand_builtin_dwarf_sp_column ();
6507 case BUILT_IN_INIT_DWARF_REG_SIZES:
6508 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6509 return const0_rtx;
6510 #endif
6511 case BUILT_IN_FROB_RETURN_ADDR:
6512 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6513 case BUILT_IN_EXTRACT_RETURN_ADDR:
6514 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6515 case BUILT_IN_EH_RETURN:
6516 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6517 CALL_EXPR_ARG (exp, 1));
6518 return const0_rtx;
6519 #ifdef EH_RETURN_DATA_REGNO
6520 case BUILT_IN_EH_RETURN_DATA_REGNO:
6521 return expand_builtin_eh_return_data_regno (exp);
6522 #endif
6523 case BUILT_IN_EXTEND_POINTER:
6524 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6526 case BUILT_IN_VA_START:
6527 case BUILT_IN_STDARG_START:
6528 return expand_builtin_va_start (exp);
6529 case BUILT_IN_VA_END:
6530 return expand_builtin_va_end (exp);
6531 case BUILT_IN_VA_COPY:
6532 return expand_builtin_va_copy (exp);
6533 case BUILT_IN_EXPECT:
6534 return expand_builtin_expect (exp, target);
6535 case BUILT_IN_PREFETCH:
6536 expand_builtin_prefetch (exp);
6537 return const0_rtx;
6539 case BUILT_IN_PROFILE_FUNC_ENTER:
6540 return expand_builtin_profile_func (false);
6541 case BUILT_IN_PROFILE_FUNC_EXIT:
6542 return expand_builtin_profile_func (true);
6544 case BUILT_IN_INIT_TRAMPOLINE:
6545 return expand_builtin_init_trampoline (exp);
6546 case BUILT_IN_ADJUST_TRAMPOLINE:
6547 return expand_builtin_adjust_trampoline (exp);
6549 case BUILT_IN_FORK:
6550 case BUILT_IN_EXECL:
6551 case BUILT_IN_EXECV:
6552 case BUILT_IN_EXECLP:
6553 case BUILT_IN_EXECLE:
6554 case BUILT_IN_EXECVP:
6555 case BUILT_IN_EXECVE:
6556 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6557 if (target)
6558 return target;
6559 break;
6561 case BUILT_IN_FETCH_AND_ADD_1:
6562 case BUILT_IN_FETCH_AND_ADD_2:
6563 case BUILT_IN_FETCH_AND_ADD_4:
6564 case BUILT_IN_FETCH_AND_ADD_8:
6565 case BUILT_IN_FETCH_AND_ADD_16:
6566 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6567 target = expand_builtin_sync_operation (mode, exp, PLUS,
6568 false, target, ignore);
6569 if (target)
6570 return target;
6571 break;
6573 case BUILT_IN_FETCH_AND_SUB_1:
6574 case BUILT_IN_FETCH_AND_SUB_2:
6575 case BUILT_IN_FETCH_AND_SUB_4:
6576 case BUILT_IN_FETCH_AND_SUB_8:
6577 case BUILT_IN_FETCH_AND_SUB_16:
6578 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6579 target = expand_builtin_sync_operation (mode, exp, MINUS,
6580 false, target, ignore);
6581 if (target)
6582 return target;
6583 break;
6585 case BUILT_IN_FETCH_AND_OR_1:
6586 case BUILT_IN_FETCH_AND_OR_2:
6587 case BUILT_IN_FETCH_AND_OR_4:
6588 case BUILT_IN_FETCH_AND_OR_8:
6589 case BUILT_IN_FETCH_AND_OR_16:
6590 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6591 target = expand_builtin_sync_operation (mode, exp, IOR,
6592 false, target, ignore);
6593 if (target)
6594 return target;
6595 break;
6597 case BUILT_IN_FETCH_AND_AND_1:
6598 case BUILT_IN_FETCH_AND_AND_2:
6599 case BUILT_IN_FETCH_AND_AND_4:
6600 case BUILT_IN_FETCH_AND_AND_8:
6601 case BUILT_IN_FETCH_AND_AND_16:
6602 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6603 target = expand_builtin_sync_operation (mode, exp, AND,
6604 false, target, ignore);
6605 if (target)
6606 return target;
6607 break;
6609 case BUILT_IN_FETCH_AND_XOR_1:
6610 case BUILT_IN_FETCH_AND_XOR_2:
6611 case BUILT_IN_FETCH_AND_XOR_4:
6612 case BUILT_IN_FETCH_AND_XOR_8:
6613 case BUILT_IN_FETCH_AND_XOR_16:
6614 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6615 target = expand_builtin_sync_operation (mode, exp, XOR,
6616 false, target, ignore);
6617 if (target)
6618 return target;
6619 break;
6621 case BUILT_IN_FETCH_AND_NAND_1:
6622 case BUILT_IN_FETCH_AND_NAND_2:
6623 case BUILT_IN_FETCH_AND_NAND_4:
6624 case BUILT_IN_FETCH_AND_NAND_8:
6625 case BUILT_IN_FETCH_AND_NAND_16:
6626 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6627 target = expand_builtin_sync_operation (mode, exp, NOT,
6628 false, target, ignore);
6629 if (target)
6630 return target;
6631 break;
6633 case BUILT_IN_ADD_AND_FETCH_1:
6634 case BUILT_IN_ADD_AND_FETCH_2:
6635 case BUILT_IN_ADD_AND_FETCH_4:
6636 case BUILT_IN_ADD_AND_FETCH_8:
6637 case BUILT_IN_ADD_AND_FETCH_16:
6638 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6639 target = expand_builtin_sync_operation (mode, exp, PLUS,
6640 true, target, ignore);
6641 if (target)
6642 return target;
6643 break;
6645 case BUILT_IN_SUB_AND_FETCH_1:
6646 case BUILT_IN_SUB_AND_FETCH_2:
6647 case BUILT_IN_SUB_AND_FETCH_4:
6648 case BUILT_IN_SUB_AND_FETCH_8:
6649 case BUILT_IN_SUB_AND_FETCH_16:
6650 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6651 target = expand_builtin_sync_operation (mode, exp, MINUS,
6652 true, target, ignore);
6653 if (target)
6654 return target;
6655 break;
6657 case BUILT_IN_OR_AND_FETCH_1:
6658 case BUILT_IN_OR_AND_FETCH_2:
6659 case BUILT_IN_OR_AND_FETCH_4:
6660 case BUILT_IN_OR_AND_FETCH_8:
6661 case BUILT_IN_OR_AND_FETCH_16:
6662 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6663 target = expand_builtin_sync_operation (mode, exp, IOR,
6664 true, target, ignore);
6665 if (target)
6666 return target;
6667 break;
6669 case BUILT_IN_AND_AND_FETCH_1:
6670 case BUILT_IN_AND_AND_FETCH_2:
6671 case BUILT_IN_AND_AND_FETCH_4:
6672 case BUILT_IN_AND_AND_FETCH_8:
6673 case BUILT_IN_AND_AND_FETCH_16:
6674 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6675 target = expand_builtin_sync_operation (mode, exp, AND,
6676 true, target, ignore);
6677 if (target)
6678 return target;
6679 break;
6681 case BUILT_IN_XOR_AND_FETCH_1:
6682 case BUILT_IN_XOR_AND_FETCH_2:
6683 case BUILT_IN_XOR_AND_FETCH_4:
6684 case BUILT_IN_XOR_AND_FETCH_8:
6685 case BUILT_IN_XOR_AND_FETCH_16:
6686 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6687 target = expand_builtin_sync_operation (mode, exp, XOR,
6688 true, target, ignore);
6689 if (target)
6690 return target;
6691 break;
6693 case BUILT_IN_NAND_AND_FETCH_1:
6694 case BUILT_IN_NAND_AND_FETCH_2:
6695 case BUILT_IN_NAND_AND_FETCH_4:
6696 case BUILT_IN_NAND_AND_FETCH_8:
6697 case BUILT_IN_NAND_AND_FETCH_16:
6698 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6699 target = expand_builtin_sync_operation (mode, exp, NOT,
6700 true, target, ignore);
6701 if (target)
6702 return target;
6703 break;
6705 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6706 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6707 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6708 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6709 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6710 if (mode == VOIDmode)
6711 mode = TYPE_MODE (boolean_type_node);
6712 if (!target || !register_operand (target, mode))
6713 target = gen_reg_rtx (mode);
6715 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6716 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6717 if (target)
6718 return target;
6719 break;
6721 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6722 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6723 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6724 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6725 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6726 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6727 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6728 if (target)
6729 return target;
6730 break;
6732 case BUILT_IN_LOCK_TEST_AND_SET_1:
6733 case BUILT_IN_LOCK_TEST_AND_SET_2:
6734 case BUILT_IN_LOCK_TEST_AND_SET_4:
6735 case BUILT_IN_LOCK_TEST_AND_SET_8:
6736 case BUILT_IN_LOCK_TEST_AND_SET_16:
6737 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6738 target = expand_builtin_lock_test_and_set (mode, exp, target);
6739 if (target)
6740 return target;
6741 break;
6743 case BUILT_IN_LOCK_RELEASE_1:
6744 case BUILT_IN_LOCK_RELEASE_2:
6745 case BUILT_IN_LOCK_RELEASE_4:
6746 case BUILT_IN_LOCK_RELEASE_8:
6747 case BUILT_IN_LOCK_RELEASE_16:
6748 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6749 expand_builtin_lock_release (mode, exp);
6750 return const0_rtx;
6752 case BUILT_IN_SYNCHRONIZE:
6753 expand_builtin_synchronize ();
6754 return const0_rtx;
6756 case BUILT_IN_OBJECT_SIZE:
6757 return expand_builtin_object_size (exp);
6759 case BUILT_IN_MEMCPY_CHK:
6760 case BUILT_IN_MEMPCPY_CHK:
6761 case BUILT_IN_MEMMOVE_CHK:
6762 case BUILT_IN_MEMSET_CHK:
6763 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6764 if (target)
6765 return target;
6766 break;
6768 case BUILT_IN_STRCPY_CHK:
6769 case BUILT_IN_STPCPY_CHK:
6770 case BUILT_IN_STRNCPY_CHK:
6771 case BUILT_IN_STRCAT_CHK:
6772 case BUILT_IN_STRNCAT_CHK:
6773 case BUILT_IN_SNPRINTF_CHK:
6774 case BUILT_IN_VSNPRINTF_CHK:
6775 maybe_emit_chk_warning (exp, fcode);
6776 break;
6778 case BUILT_IN_SPRINTF_CHK:
6779 case BUILT_IN_VSPRINTF_CHK:
6780 maybe_emit_sprintf_chk_warning (exp, fcode);
6781 break;
6783 default: /* just do library call, if unknown builtin */
6784 break;
6787 /* The switch statement above can drop through to cause the function
6788 to be called normally. */
6789 return expand_call (exp, target, ignore);
6792 /* Determine whether a tree node represents a call to a built-in
6793 function. If the tree T is a call to a built-in function with
6794 the right number of arguments of the appropriate types, return
6795 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6796 Otherwise the return value is END_BUILTINS. */
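/* For example, a call to sqrtf with a single float argument yields
   BUILT_IN_SQRTF, while a call whose arguments do not match the
   builtin's prototype yields END_BUILTINS.  */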
6798 enum built_in_function
6799 builtin_mathfn_code (tree t)
6801 tree fndecl, arg, parmlist;
6802 tree argtype, parmtype;
6803 call_expr_arg_iterator iter;
6805 if (TREE_CODE (t) != CALL_EXPR
6806 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6807 return END_BUILTINS;
6809 fndecl = get_callee_fndecl (t);
6810 if (fndecl == NULL_TREE
6811 || TREE_CODE (fndecl) != FUNCTION_DECL
6812 || ! DECL_BUILT_IN (fndecl)
6813 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6814 return END_BUILTINS;
6816 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6817 init_call_expr_arg_iterator (t, &iter);
6818 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6820 /* If a function doesn't take a variable number of arguments,
6821 the last element in the list will have type `void'. */
6822 parmtype = TREE_VALUE (parmlist);
6823 if (VOID_TYPE_P (parmtype))
6825 if (more_call_expr_args_p (&iter))
6826 return END_BUILTINS;
6827 return DECL_FUNCTION_CODE (fndecl);
6830 if (! more_call_expr_args_p (&iter))
6831 return END_BUILTINS;
6833 arg = next_call_expr_arg (&iter);
6834 argtype = TREE_TYPE (arg);
6836 if (SCALAR_FLOAT_TYPE_P (parmtype))
6838 if (! SCALAR_FLOAT_TYPE_P (argtype))
6839 return END_BUILTINS;
6841 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6843 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6844 return END_BUILTINS;
6846 else if (POINTER_TYPE_P (parmtype))
6848 if (! POINTER_TYPE_P (argtype))
6849 return END_BUILTINS;
6851 else if (INTEGRAL_TYPE_P (parmtype))
6853 if (! INTEGRAL_TYPE_P (argtype))
6854 return END_BUILTINS;
6856 else
6857 return END_BUILTINS;
6860 /* Variable-length argument list. */
6861 return DECL_FUNCTION_CODE (fndecl);
6864 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6865 evaluate to a constant. */
6867 static tree
6868 fold_builtin_constant_p (tree arg)
6870 /* We return 1 for a numeric type that's known to be a constant
6871 value at compile-time or for an aggregate type that's a
6872 literal constant. */
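  /* For example, __builtin_constant_p (42) and __builtin_constant_p ("x")
     fold to 1 below, while an argument with side effects folds to 0.  */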
6873 STRIP_NOPS (arg);
6875 /* If we know this is a constant, fold to the constant 1. */
6876 if (CONSTANT_CLASS_P (arg)
6877 || (TREE_CODE (arg) == CONSTRUCTOR
6878 && TREE_CONSTANT (arg)))
6879 return integer_one_node;
6880 if (TREE_CODE (arg) == ADDR_EXPR)
6882 tree op = TREE_OPERAND (arg, 0);
6883 if (TREE_CODE (op) == STRING_CST
6884 || (TREE_CODE (op) == ARRAY_REF
6885 && integer_zerop (TREE_OPERAND (op, 1))
6886 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6887 return integer_one_node;
6890 /* If this expression has side effects, show we don't know it to be a
6891 constant. Likewise if it's a pointer or aggregate type, since in
6892 those cases we only want literals, as those are only optimized
6893 when generating RTL, not later.
6894 And finally, if we are compiling an initializer, not code, we
6895 need to return a definite result now; there's not going to be any
6896 more optimization done. */
6897 if (TREE_SIDE_EFFECTS (arg)
6898 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6899 || POINTER_TYPE_P (TREE_TYPE (arg))
6900 || cfun == 0
6901 || folding_initializer)
6902 return integer_zero_node;
6904 return NULL_TREE;
6907 /* Fold a call to __builtin_expect with argument ARG, if we expect that a
6908 comparison against the argument will fold to a constant. In practice,
6909 this means a true constant or the address of a non-weak symbol. */
6911 static tree
6912 fold_builtin_expect (tree arg)
6914 tree inner;
6916 /* If the argument isn't invariant, then there's nothing we can do. */
6917 if (!TREE_INVARIANT (arg))
6918 return NULL_TREE;
6920 /* If we're looking at an address of a weak decl, then do not fold. */
6921 inner = arg;
6922 STRIP_NOPS (inner);
6923 if (TREE_CODE (inner) == ADDR_EXPR)
6927 inner = TREE_OPERAND (inner, 0);
6929 while (TREE_CODE (inner) == COMPONENT_REF
6930 || TREE_CODE (inner) == ARRAY_REF);
6931 if (DECL_P (inner) && DECL_WEAK (inner))
6932 return NULL_TREE;
6935 /* Otherwise, ARG already has the proper type for the return value. */
6936 return arg;
6939 /* Fold a call to __builtin_classify_type with argument ARG. */
6941 static tree
6942 fold_builtin_classify_type (tree arg)
6944 if (arg == 0)
6945 return build_int_cst (NULL_TREE, no_type_class);
6947 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6950 /* Fold a call to __builtin_strlen with argument ARG. */
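/* For example, strlen ("abcd") folds to the size_t constant 4.  */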
6952 static tree
6953 fold_builtin_strlen (tree arg)
6955 if (!validate_arg (arg, POINTER_TYPE))
6956 return NULL_TREE;
6957 else
6959 tree len = c_strlen (arg, 0);
6961 if (len)
6963 /* Convert from the internal "sizetype" type to "size_t". */
6964 if (size_type_node)
6965 len = fold_convert (size_type_node, len);
6966 return len;
6969 return NULL_TREE;
6973 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6975 static tree
6976 fold_builtin_inf (tree type, int warn)
6978 REAL_VALUE_TYPE real;
6980 /* __builtin_inff is intended to be usable to define INFINITY on all
6981 targets. If an infinity is not available, INFINITY expands "to a
6982 positive constant of type float that overflows at translation
6983 time", footnote "In this case, using INFINITY will violate the
6984 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6985 Thus we pedwarn to ensure this constraint violation is
6986 diagnosed. */
6987 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6988 pedwarn ("target format does not support infinity");
6990 real_inf (&real);
6991 return build_real (type, real);
6994 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6996 static tree
6997 fold_builtin_nan (tree arg, tree type, int quiet)
6999 REAL_VALUE_TYPE real;
7000 const char *str;
7002 if (!validate_arg (arg, POINTER_TYPE))
7003 return NULL_TREE;
7004 str = c_getstr (arg);
7005 if (!str)
7006 return NULL_TREE;
7008 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7009 return NULL_TREE;
7011 return build_real (type, real);
7014 /* Return true if the floating point expression T has an integer value.
7015 We also allow +Inf, -Inf and NaN to be considered integer values. */
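/* For example, (double) i for an integer i, floor (x), and the constant
   2.0 all count as integer valued, as does fmin (a, b) when both of its
   arguments do.  */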
7017 static bool
7018 integer_valued_real_p (tree t)
7020 switch (TREE_CODE (t))
7022 case FLOAT_EXPR:
7023 return true;
7025 case ABS_EXPR:
7026 case SAVE_EXPR:
7027 case NON_LVALUE_EXPR:
7028 return integer_valued_real_p (TREE_OPERAND (t, 0));
7030 case COMPOUND_EXPR:
7031 case MODIFY_EXPR:
7032 case BIND_EXPR:
7033 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7035 case PLUS_EXPR:
7036 case MINUS_EXPR:
7037 case MULT_EXPR:
7038 case MIN_EXPR:
7039 case MAX_EXPR:
7040 return integer_valued_real_p (TREE_OPERAND (t, 0))
7041 && integer_valued_real_p (TREE_OPERAND (t, 1));
7043 case COND_EXPR:
7044 return integer_valued_real_p (TREE_OPERAND (t, 1))
7045 && integer_valued_real_p (TREE_OPERAND (t, 2));
7047 case REAL_CST:
7048 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7050 case NOP_EXPR:
7052 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7053 if (TREE_CODE (type) == INTEGER_TYPE)
7054 return true;
7055 if (TREE_CODE (type) == REAL_TYPE)
7056 return integer_valued_real_p (TREE_OPERAND (t, 0));
7057 break;
7060 case CALL_EXPR:
7061 switch (builtin_mathfn_code (t))
7063 CASE_FLT_FN (BUILT_IN_CEIL):
7064 CASE_FLT_FN (BUILT_IN_FLOOR):
7065 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7066 CASE_FLT_FN (BUILT_IN_RINT):
7067 CASE_FLT_FN (BUILT_IN_ROUND):
7068 CASE_FLT_FN (BUILT_IN_TRUNC):
7069 return true;
7071 CASE_FLT_FN (BUILT_IN_FMIN):
7072 CASE_FLT_FN (BUILT_IN_FMAX):
7073 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7074 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7076 default:
7077 break;
7079 break;
7081 default:
7082 break;
7084 return false;
7087 /* FNDECL is assumed to be a builtin where truncation can be propagated
7088 across (for instance floor((double)f) == (double)floorf (f)).
7089 Do the transformation for a call with argument ARG. */
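/* For example, floor (floor (x)) folds to floor (x); floor (x) folds to
   x when x is already known to be integer valued and errno handling is
   not required; and floor ((double) f) narrows to (double) floorf (f)
   when optimizing.  */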
7091 static tree
7092 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7094 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7096 if (!validate_arg (arg, REAL_TYPE))
7097 return NULL_TREE;
7099 /* Integer rounding functions are idempotent. */
7100 if (fcode == builtin_mathfn_code (arg))
7101 return arg;
7103 /* If argument is already integer valued, and we don't need to worry
7104 about setting errno, there's no need to perform rounding. */
7105 if (! flag_errno_math && integer_valued_real_p (arg))
7106 return arg;
7108 if (optimize)
7110 tree arg0 = strip_float_extensions (arg);
7111 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7112 tree newtype = TREE_TYPE (arg0);
7113 tree decl;
7115 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7116 && (decl = mathfn_built_in (newtype, fcode)))
7117 return fold_convert (ftype,
7118 build_call_expr (decl, 1,
7119 fold_convert (newtype, arg0)));
7121 return NULL_TREE;
7124 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7125 the argument, for instance lround((double)f) -> lroundf (f).
7126 Do the transformation for a call with argument ARG. */
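/* For example, lround ((double) f) narrows to lroundf (f) when
   optimizing, and llround (x) is canonicalized to lround (x) on targets
   where long and long long have the same precision.  */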
7128 static tree
7129 fold_fixed_mathfn (tree fndecl, tree arg)
7131 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7133 if (!validate_arg (arg, REAL_TYPE))
7134 return NULL_TREE;
7136 /* If argument is already integer valued, and we don't need to worry
7137 about setting errno, there's no need to perform rounding. */
7138 if (! flag_errno_math && integer_valued_real_p (arg))
7139 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7141 if (optimize)
7143 tree ftype = TREE_TYPE (arg);
7144 tree arg0 = strip_float_extensions (arg);
7145 tree newtype = TREE_TYPE (arg0);
7146 tree decl;
7148 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7149 && (decl = mathfn_built_in (newtype, fcode)))
7150 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7153 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7154 sizeof (long long) == sizeof (long). */
7155 if (TYPE_PRECISION (long_long_integer_type_node)
7156 == TYPE_PRECISION (long_integer_type_node))
7158 tree newfn = NULL_TREE;
7159 switch (fcode)
7161 CASE_FLT_FN (BUILT_IN_LLCEIL):
7162 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7163 break;
7165 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7166 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7167 break;
7169 CASE_FLT_FN (BUILT_IN_LLROUND):
7170 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7171 break;
7173 CASE_FLT_FN (BUILT_IN_LLRINT):
7174 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7175 break;
7177 default:
7178 break;
7181 if (newfn)
7183 tree newcall = build_call_expr (newfn, 1, arg);
7184 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7188 return NULL_TREE;
7191 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7192 return type. Return NULL_TREE if no simplification can be made. */
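/* For example, cabs (x + 0*I) folds to fabs (x); with unsafe math
   optimizations cabs (x + x*I) becomes fabs (x) * sqrt (2); and, when
   additionally not optimizing for size, cabs (z) may be expanded as
   sqrt (r*r + i*i) on the real and imaginary parts.  */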
7194 static tree
7195 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7197 tree res;
7199 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7200 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7201 return NULL_TREE;
7203 /* Calculate the result when the argument is a constant. */
7204 if (TREE_CODE (arg) == COMPLEX_CST
7205 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7206 type, mpfr_hypot)))
7207 return res;
7209 if (TREE_CODE (arg) == COMPLEX_EXPR)
7211 tree real = TREE_OPERAND (arg, 0);
7212 tree imag = TREE_OPERAND (arg, 1);
7214 /* If either part is zero, cabs is fabs of the other. */
7215 if (real_zerop (real))
7216 return fold_build1 (ABS_EXPR, type, imag);
7217 if (real_zerop (imag))
7218 return fold_build1 (ABS_EXPR, type, real);
7220 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7221 if (flag_unsafe_math_optimizations
7222 && operand_equal_p (real, imag, OEP_PURE_SAME))
7224 const REAL_VALUE_TYPE sqrt2_trunc
7225 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
7226 STRIP_NOPS (real);
7227 return fold_build2 (MULT_EXPR, type,
7228 fold_build1 (ABS_EXPR, type, real),
7229 build_real (type, sqrt2_trunc));
7233 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7234 if (TREE_CODE (arg) == NEGATE_EXPR
7235 || TREE_CODE (arg) == CONJ_EXPR)
7236 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7238 /* Don't do this when optimizing for size. */
7239 if (flag_unsafe_math_optimizations
7240 && optimize && !optimize_size)
7242 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7244 if (sqrtfn != NULL_TREE)
7246 tree rpart, ipart, result;
7248 arg = builtin_save_expr (arg);
7250 rpart = fold_build1 (REALPART_EXPR, type, arg);
7251 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7253 rpart = builtin_save_expr (rpart);
7254 ipart = builtin_save_expr (ipart);
7256 result = fold_build2 (PLUS_EXPR, type,
7257 fold_build2 (MULT_EXPR, type,
7258 rpart, rpart),
7259 fold_build2 (MULT_EXPR, type,
7260 ipart, ipart));
7262 return build_call_expr (sqrtfn, 1, result);
7266 return NULL_TREE;
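/* Editorial illustration (not part of the original source):
       cabs (-z)       -> cabs (z)
       cabs (x + x*I)  -> fabs (x) * sqrt (2)   [-funsafe-math-optimizations]
   and, when optimizing (but not for size) with unsafe math enabled,
       cabs (z) -> sqrt (creal (z)*creal (z) + cimag (z)*cimag (z)).  */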
7269 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7270 Return NULL_TREE if no simplification can be made. */
7272 static tree
7273 fold_builtin_sqrt (tree arg, tree type)
7276 enum built_in_function fcode;
7277 tree res;
7279 if (!validate_arg (arg, REAL_TYPE))
7280 return NULL_TREE;
7282 /* Calculate the result when the argument is a constant. */
7283 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7284 return res;
7286 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7287 fcode = builtin_mathfn_code (arg);
7288 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7290 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7291 arg = fold_build2 (MULT_EXPR, type,
7292 CALL_EXPR_ARG (arg, 0),
7293 build_real (type, dconsthalf));
7294 return build_call_expr (expfn, 1, arg);
7297 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7298 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7300 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7302 if (powfn)
7304 tree arg0 = CALL_EXPR_ARG (arg, 0);
7305 tree tree_root;
7306 /* The inner root was either sqrt or cbrt. */
7307 REAL_VALUE_TYPE dconstroot =
7308 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;
7310 /* Adjust for the outer root. */
7311 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7312 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7313 tree_root = build_real (type, dconstroot);
7314 return build_call_expr (powfn, 2, arg0, tree_root);
7318 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7319 if (flag_unsafe_math_optimizations
7320 && (fcode == BUILT_IN_POW
7321 || fcode == BUILT_IN_POWF
7322 || fcode == BUILT_IN_POWL))
7324 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7325 tree arg0 = CALL_EXPR_ARG (arg, 0);
7326 tree arg1 = CALL_EXPR_ARG (arg, 1);
7327 tree narg1;
7328 if (!tree_expr_nonnegative_p (arg0))
7329 arg0 = build1 (ABS_EXPR, type, arg0);
7330 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7331 build_real (type, dconsthalf));
7332 return build_call_expr (powfn, 2, arg0, narg1);
7335 return NULL_TREE;
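/* Editorial illustration (not part of the original source): with
   -funsafe-math-optimizations the folds above give, for example,
       sqrt (exp (x))    -> exp (x * 0.5)
       sqrt (sqrt (x))   -> pow (x, 0.25)
       sqrt (pow (x, y)) -> pow (fabs (x), y * 0.5).  */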
7338 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7339 Return NULL_TREE if no simplification can be made. */
7341 static tree
7342 fold_builtin_cbrt (tree arg, tree type)
7344 const enum built_in_function fcode = builtin_mathfn_code (arg);
7345 tree res;
7347 if (!validate_arg (arg, REAL_TYPE))
7348 return NULL_TREE;
7350 /* Calculate the result when the argument is a constant. */
7351 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7352 return res;
7354 if (flag_unsafe_math_optimizations)
7356 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7357 if (BUILTIN_EXPONENT_P (fcode))
7359 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7360 const REAL_VALUE_TYPE third_trunc =
7361 real_value_truncate (TYPE_MODE (type), dconstthird);
7362 arg = fold_build2 (MULT_EXPR, type,
7363 CALL_EXPR_ARG (arg, 0),
7364 build_real (type, third_trunc));
7365 return build_call_expr (expfn, 1, arg);
7368 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7369 if (BUILTIN_SQRT_P (fcode))
7371 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7373 if (powfn)
7375 tree arg0 = CALL_EXPR_ARG (arg, 0);
7376 tree tree_root;
7377 REAL_VALUE_TYPE dconstroot = dconstthird;
7379 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7380 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7381 tree_root = build_real (type, dconstroot);
7382 return build_call_expr (powfn, 2, arg0, tree_root);
7386 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7387 if (BUILTIN_CBRT_P (fcode))
7389 tree arg0 = CALL_EXPR_ARG (arg, 0);
7390 if (tree_expr_nonnegative_p (arg0))
7392 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7394 if (powfn)
7396 tree tree_root;
7397 REAL_VALUE_TYPE dconstroot;
7399 real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
7400 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7401 tree_root = build_real (type, dconstroot);
7402 return build_call_expr (powfn, 2, arg0, tree_root);
7407 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7408 if (fcode == BUILT_IN_POW
7409 || fcode == BUILT_IN_POWF
7410 || fcode == BUILT_IN_POWL)
7412 tree arg00 = CALL_EXPR_ARG (arg, 0);
7413 tree arg01 = CALL_EXPR_ARG (arg, 1);
7414 if (tree_expr_nonnegative_p (arg00))
7416 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7417 const REAL_VALUE_TYPE dconstroot
7418 = real_value_truncate (TYPE_MODE (type), dconstthird);
7419 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7420 build_real (type, dconstroot));
7421 return build_call_expr (powfn, 2, arg00, narg01);
7425 return NULL_TREE;
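/* Editorial illustration (not part of the original source): with
   -funsafe-math-optimizations,
       cbrt (exp (x))  -> exp (x / 3)
       cbrt (sqrt (x)) -> pow (x, 1.0/6)
   and, when x is known to be nonnegative,
       cbrt (cbrt (x)) -> pow (x, 1.0/9).  */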
7428 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7429 TYPE is the type of the return value. Return NULL_TREE if no
7430 simplification can be made. */
7432 static tree
7433 fold_builtin_cos (tree arg, tree type, tree fndecl)
7435 tree res, narg;
7437 if (!validate_arg (arg, REAL_TYPE))
7438 return NULL_TREE;
7440 /* Calculate the result when the argument is a constant. */
7441 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7442 return res;
7444 /* Optimize cos(-x) into cos (x). */
7445 if ((narg = fold_strip_sign_ops (arg)))
7446 return build_call_expr (fndecl, 1, narg);
7448 return NULL_TREE;
7451 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7452 Return NULL_TREE if no simplification can be made. */
7454 static tree
7455 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7457 if (validate_arg (arg, REAL_TYPE))
7459 tree res, narg;
7461 /* Calculate the result when the argument is a constant. */
7462 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7463 return res;
7465 /* Optimize cosh(-x) into cosh (x). */
7466 if ((narg = fold_strip_sign_ops (arg)))
7467 return build_call_expr (fndecl, 1, narg);
7470 return NULL_TREE;
7473 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7474 Return NULL_TREE if no simplification can be made. */
7476 static tree
7477 fold_builtin_tan (tree arg, tree type)
7479 enum built_in_function fcode;
7480 tree res;
7482 if (!validate_arg (arg, REAL_TYPE))
7483 return NULL_TREE;
7485 /* Calculate the result when the argument is a constant. */
7486 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7487 return res;
7489 /* Optimize tan(atan(x)) = x. */
7490 fcode = builtin_mathfn_code (arg);
7491 if (flag_unsafe_math_optimizations
7492 && (fcode == BUILT_IN_ATAN
7493 || fcode == BUILT_IN_ATANF
7494 || fcode == BUILT_IN_ATANL))
7495 return CALL_EXPR_ARG (arg, 0);
7497 return NULL_TREE;
7500 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7501 NULL_TREE if no simplification can be made. */
7503 static tree
7504 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7506 tree type;
7507 tree res, fn, call;
7509 if (!validate_arg (arg0, REAL_TYPE)
7510 || !validate_arg (arg1, POINTER_TYPE)
7511 || !validate_arg (arg2, POINTER_TYPE))
7512 return NULL_TREE;
7514 type = TREE_TYPE (arg0);
7516 /* Calculate the result when the argument is a constant. */
7517 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7518 return res;
7520 /* Canonicalize sincos to cexpi. */
7521 if (!TARGET_C99_FUNCTIONS)
7522 return NULL_TREE;
7523 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7524 if (!fn)
7525 return NULL_TREE;
7527 call = build_call_expr (fn, 1, arg0);
7528 call = builtin_save_expr (call);
7530 return build2 (COMPOUND_EXPR, type,
7531 build2 (MODIFY_EXPR, void_type_node,
7532 build_fold_indirect_ref (arg1),
7533 build1 (IMAGPART_EXPR, type, call)),
7534 build2 (MODIFY_EXPR, void_type_node,
7535 build_fold_indirect_ref (arg2),
7536 build1 (REALPART_EXPR, type, call)));
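/* Editorial illustration (not part of the original source): assuming the
   target provides the C99 functions (TARGET_C99_FUNCTIONS), a call
       sincos (x, &s, &c);
   is canonicalized to the equivalent of
       __complex__ double t = cexpi (x);  s = __imag__ t;  c = __real__ t;  */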
7539 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7540 NULL_TREE if no simplification can be made. */
7542 static tree
7543 fold_builtin_cexp (tree arg0, tree type)
7545 tree rtype;
7546 tree realp, imagp, ifn;
7548 if (!validate_arg (arg0, COMPLEX_TYPE))
7549 return NULL_TREE;
7551 rtype = TREE_TYPE (TREE_TYPE (arg0));
7553 /* If we can figure out the real part of arg0 and it is constant zero,
7554 fold to cexpi. */
7555 if (!TARGET_C99_FUNCTIONS)
7556 return NULL_TREE;
7557 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7558 if (!ifn)
7559 return NULL_TREE;
7561 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7562 && real_zerop (realp))
7564 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7565 return build_call_expr (ifn, 1, narg);
7568 /* If we can easily decompose the real and imaginary parts, split cexp
7569 into exp (r) * cexpi (i). */
7570 if (flag_unsafe_math_optimizations
7571 && realp)
7573 tree rfn, rcall, icall;
7575 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7576 if (!rfn)
7577 return NULL_TREE;
7579 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7580 if (!imagp)
7581 return NULL_TREE;
7583 icall = build_call_expr (ifn, 1, imagp);
7584 icall = builtin_save_expr (icall);
7585 rcall = build_call_expr (rfn, 1, realp);
7586 rcall = builtin_save_expr (rcall);
7587 return build2 (COMPLEX_EXPR, type,
7588 build2 (MULT_EXPR, rtype,
7589 rcall,
7590 build1 (REALPART_EXPR, rtype, icall)),
7591 build2 (MULT_EXPR, rtype,
7592 rcall,
7593 build1 (IMAGPART_EXPR, rtype, icall)));
7596 return NULL_TREE;
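/* Editorial illustration (not part of the original source): assuming
   C99 support on the target,
       cexp (0.0 + y*I) -> cexpi (y)
   and with -funsafe-math-optimizations, when the parts are decomposable,
       cexp (x + y*I)   -> exp (x) * cexpi (y).  */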
7599 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7600 Return NULL_TREE if no simplification can be made. */
7602 static tree
7603 fold_builtin_trunc (tree fndecl, tree arg)
7605 if (!validate_arg (arg, REAL_TYPE))
7606 return NULL_TREE;
7608 /* Optimize trunc of constant value. */
7609 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7611 REAL_VALUE_TYPE r, x;
7612 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7614 x = TREE_REAL_CST (arg);
7615 real_trunc (&r, TYPE_MODE (type), &x);
7616 return build_real (type, r);
7619 return fold_trunc_transparent_mathfn (fndecl, arg);
7622 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7623 Return NULL_TREE if no simplification can be made. */
7625 static tree
7626 fold_builtin_floor (tree fndecl, tree arg)
7628 if (!validate_arg (arg, REAL_TYPE))
7629 return NULL_TREE;
7631 /* Optimize floor of constant value. */
7632 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7634 REAL_VALUE_TYPE x;
7636 x = TREE_REAL_CST (arg);
7637 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7639 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7640 REAL_VALUE_TYPE r;
7642 real_floor (&r, TYPE_MODE (type), &x);
7643 return build_real (type, r);
7647 /* Fold floor (x) where x is nonnegative to trunc (x). */
7648 if (tree_expr_nonnegative_p (arg))
7650 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7651 if (truncfn)
7652 return build_call_expr (truncfn, 1, arg);
7655 return fold_trunc_transparent_mathfn (fndecl, arg);
7658 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7659 Return NULL_TREE if no simplification can be made. */
7661 static tree
7662 fold_builtin_ceil (tree fndecl, tree arg)
7664 if (!validate_arg (arg, REAL_TYPE))
7665 return NULL_TREE;
7667 /* Optimize ceil of constant value. */
7668 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7670 REAL_VALUE_TYPE x;
7672 x = TREE_REAL_CST (arg);
7673 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7675 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7676 REAL_VALUE_TYPE r;
7678 real_ceil (&r, TYPE_MODE (type), &x);
7679 return build_real (type, r);
7683 return fold_trunc_transparent_mathfn (fndecl, arg);
7686 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7687 Return NULL_TREE if no simplification can be made. */
7689 static tree
7690 fold_builtin_round (tree fndecl, tree arg)
7692 if (!validate_arg (arg, REAL_TYPE))
7693 return NULL_TREE;
7695 /* Optimize round of constant value. */
7696 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7698 REAL_VALUE_TYPE x;
7700 x = TREE_REAL_CST (arg);
7701 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7703 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7704 REAL_VALUE_TYPE r;
7706 real_round (&r, TYPE_MODE (type), &x);
7707 return build_real (type, r);
7711 return fold_trunc_transparent_mathfn (fndecl, arg);
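/* Editorial illustration (not part of the original source): constant
   arguments to the functions above fold at compile time, e.g.
       trunc (2.9) -> 2.0,  floor (-2.5) -> -3.0,
       ceil (2.1)  -> 3.0,  round (-2.5) -> -3.0,
   and floor (x) becomes trunc (x) when x is known to be nonnegative.  */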
7714 /* Fold function call to builtin lround, lroundf or lroundl (or the
7715 corresponding long long versions) and other rounding functions. ARG
7716 is the argument to the call. Return NULL_TREE if no simplification
7717 can be made. */
7719 static tree
7720 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7722 if (!validate_arg (arg, REAL_TYPE))
7723 return NULL_TREE;
7725 /* Optimize lround of constant value. */
7726 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7728 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7730 if (! REAL_VALUE_ISNAN (x) && ! REAL_VALUE_ISINF (x))
7732 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7733 tree ftype = TREE_TYPE (arg);
7734 unsigned HOST_WIDE_INT lo2;
7735 HOST_WIDE_INT hi, lo;
7736 REAL_VALUE_TYPE r;
7738 switch (DECL_FUNCTION_CODE (fndecl))
7740 CASE_FLT_FN (BUILT_IN_LFLOOR):
7741 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7742 real_floor (&r, TYPE_MODE (ftype), &x);
7743 break;
7745 CASE_FLT_FN (BUILT_IN_LCEIL):
7746 CASE_FLT_FN (BUILT_IN_LLCEIL):
7747 real_ceil (&r, TYPE_MODE (ftype), &x);
7748 break;
7750 CASE_FLT_FN (BUILT_IN_LROUND):
7751 CASE_FLT_FN (BUILT_IN_LLROUND):
7752 real_round (&r, TYPE_MODE (ftype), &x);
7753 break;
7755 default:
7756 gcc_unreachable ();
7759 REAL_VALUE_TO_INT (&lo, &hi, r);
7760 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7761 return build_int_cst_wide (itype, lo2, hi);
7765 switch (DECL_FUNCTION_CODE (fndecl))
7767 CASE_FLT_FN (BUILT_IN_LFLOOR):
7768 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7769 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7770 if (tree_expr_nonnegative_p (arg))
7771 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
7772 arg);
7773 break;
7774 default:;
7777 return fold_fixed_mathfn (fndecl, arg);
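/* Editorial illustration (not part of the original source):
       lround (2.5) -> 3   (when the value fits the result type)
       lfloor (x)   -> (long) x   when x is known to be nonnegative,
   and llfloor/llceil/llround/llrint become their "l" counterparts on
   targets where long and long long have the same precision.  */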
7780 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7781 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7782 the argument to the call. Return NULL_TREE if no simplification can
7783 be made. */
7785 static tree
7786 fold_builtin_bitop (tree fndecl, tree arg)
7788 if (!validate_arg (arg, INTEGER_TYPE))
7789 return NULL_TREE;
7791 /* Optimize for constant argument. */
7792 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7794 HOST_WIDE_INT hi, width, result;
7795 unsigned HOST_WIDE_INT lo;
7796 tree type;
7798 type = TREE_TYPE (arg);
7799 width = TYPE_PRECISION (type);
7800 lo = TREE_INT_CST_LOW (arg);
7802 /* Clear all the bits that are beyond the type's precision. */
7803 if (width > HOST_BITS_PER_WIDE_INT)
7805 hi = TREE_INT_CST_HIGH (arg);
7806 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7807 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7809 else
7811 hi = 0;
7812 if (width < HOST_BITS_PER_WIDE_INT)
7813 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7816 switch (DECL_FUNCTION_CODE (fndecl))
7818 CASE_INT_FN (BUILT_IN_FFS):
7819 if (lo != 0)
7820 result = exact_log2 (lo & -lo) + 1;
7821 else if (hi != 0)
7822 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7823 else
7824 result = 0;
7825 break;
7827 CASE_INT_FN (BUILT_IN_CLZ):
7828 if (hi != 0)
7829 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7830 else if (lo != 0)
7831 result = width - floor_log2 (lo) - 1;
7832 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7833 result = width;
7834 break;
7836 CASE_INT_FN (BUILT_IN_CTZ):
7837 if (lo != 0)
7838 result = exact_log2 (lo & -lo);
7839 else if (hi != 0)
7840 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7841 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7842 result = width;
7843 break;
7845 CASE_INT_FN (BUILT_IN_POPCOUNT):
7846 result = 0;
7847 while (lo)
7848 result++, lo &= lo - 1;
7849 while (hi)
7850 result++, hi &= hi - 1;
7851 break;
7853 CASE_INT_FN (BUILT_IN_PARITY):
7854 result = 0;
7855 while (lo)
7856 result++, lo &= lo - 1;
7857 while (hi)
7858 result++, hi &= hi - 1;
7859 result &= 1;
7860 break;
7862 default:
7863 gcc_unreachable ();
7866 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7869 return NULL_TREE;
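/* Editorial illustration (not part of the original source): for a
   32-bit int argument the constant folds above give
       __builtin_ffs (0x10)      -> 5
       __builtin_clz (1)         -> 31
       __builtin_ctz (8)         -> 3
       __builtin_popcount (0xff) -> 8
       __builtin_parity (7)      -> 1.  */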
7872 /* Fold function call to builtin_bswap and the long and long long
7873 variants. Return NULL_TREE if no simplification can be made. */
7874 static tree
7875 fold_builtin_bswap (tree fndecl, tree arg)
7877 if (! validate_arg (arg, INTEGER_TYPE))
7878 return NULL_TREE;
7880 /* Optimize constant value. */
7881 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7883 HOST_WIDE_INT hi, width, r_hi = 0;
7884 unsigned HOST_WIDE_INT lo, r_lo = 0;
7885 tree type;
7887 type = TREE_TYPE (arg);
7888 width = TYPE_PRECISION (type);
7889 lo = TREE_INT_CST_LOW (arg);
7890 hi = TREE_INT_CST_HIGH (arg);
7892 switch (DECL_FUNCTION_CODE (fndecl))
7894 case BUILT_IN_BSWAP32:
7895 case BUILT_IN_BSWAP64:
7897 int s;
7899 for (s = 0; s < width; s += 8)
7901 int d = width - s - 8;
7902 unsigned HOST_WIDE_INT byte;
7904 if (s < HOST_BITS_PER_WIDE_INT)
7905 byte = (lo >> s) & 0xff;
7906 else
7907 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7909 if (d < HOST_BITS_PER_WIDE_INT)
7910 r_lo |= byte << d;
7911 else
7912 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7916 break;
7918 default:
7919 gcc_unreachable ();
7922 if (width < HOST_BITS_PER_WIDE_INT)
7923 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7924 else
7925 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7928 return NULL_TREE;
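/* Editorial illustration (not part of the original source):
       __builtin_bswap32 (0x12345678)         -> 0x78563412
       __builtin_bswap64 (0x0102030405060708) -> 0x0807060504030201.  */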
7931 /* Return true if EXPR is the real constant contained in VALUE. */
7933 static bool
7934 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
7936 STRIP_NOPS (expr);
7938 return ((TREE_CODE (expr) == REAL_CST
7939 && !TREE_OVERFLOW (expr)
7940 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
7941 || (TREE_CODE (expr) == COMPLEX_CST
7942 && real_dconstp (TREE_REALPART (expr), value)
7943 && real_zerop (TREE_IMAGPART (expr))));
7946 /* A subroutine of fold_builtin to fold the various logarithmic
7947 functions. Return NULL_TREE if no simplification can be made.
7948 FUNC is the corresponding MPFR logarithm function. */
7950 static tree
7951 fold_builtin_logarithm (tree fndecl, tree arg,
7952 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7954 if (validate_arg (arg, REAL_TYPE))
7956 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7957 tree res;
7958 const enum built_in_function fcode = builtin_mathfn_code (arg);
7960 /* Optimize log(e) = 1.0. We're never passed an exact 'e';
7961 instead we'll look for 'e' truncated to MODE. So only do
7962 this if flag_unsafe_math_optimizations is set. */
7963 if (flag_unsafe_math_optimizations && func == mpfr_log)
7965 const REAL_VALUE_TYPE e_truncated =
7966 real_value_truncate (TYPE_MODE (type), dconste);
7967 if (real_dconstp (arg, &e_truncated))
7968 return build_real (type, dconst1);
7971 /* Calculate the result when the argument is a constant. */
7972 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7973 return res;
7975 /* Special case, optimize logN(expN(x)) = x. */
7976 if (flag_unsafe_math_optimizations
7977 && ((func == mpfr_log
7978 && (fcode == BUILT_IN_EXP
7979 || fcode == BUILT_IN_EXPF
7980 || fcode == BUILT_IN_EXPL))
7981 || (func == mpfr_log2
7982 && (fcode == BUILT_IN_EXP2
7983 || fcode == BUILT_IN_EXP2F
7984 || fcode == BUILT_IN_EXP2L))
7985 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7986 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
7988 /* Optimize logN(func()) for various exponential functions. We
7989 want to determine the value "x" and the power "exponent" in
7990 order to transform logN(x**exponent) into exponent*logN(x). */
7991 if (flag_unsafe_math_optimizations)
7993 tree exponent = 0, x = 0;
7995 switch (fcode)
7997 CASE_FLT_FN (BUILT_IN_EXP):
7998 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
7999 x = build_real (type,
8000 real_value_truncate (TYPE_MODE (type), dconste));
8001 exponent = CALL_EXPR_ARG (arg, 0);
8002 break;
8003 CASE_FLT_FN (BUILT_IN_EXP2):
8004 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8005 x = build_real (type, dconst2);
8006 exponent = CALL_EXPR_ARG (arg, 0);
8007 break;
8008 CASE_FLT_FN (BUILT_IN_EXP10):
8009 CASE_FLT_FN (BUILT_IN_POW10):
8010 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8011 x = build_real (type, dconst10);
8012 exponent = CALL_EXPR_ARG (arg, 0);
8013 break;
8014 CASE_FLT_FN (BUILT_IN_SQRT):
8015 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8016 x = CALL_EXPR_ARG (arg, 0);
8017 exponent = build_real (type, dconsthalf);
8018 break;
8019 CASE_FLT_FN (BUILT_IN_CBRT):
8020 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8021 x = CALL_EXPR_ARG (arg, 0);
8022 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8023 dconstthird));
8024 break;
8025 CASE_FLT_FN (BUILT_IN_POW):
8026 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8027 x = CALL_EXPR_ARG (arg, 0);
8028 exponent = CALL_EXPR_ARG (arg, 1);
8029 break;
8030 default:
8031 break;
8034 /* Now perform the optimization. */
8035 if (x && exponent)
8037 tree logfn = build_call_expr (fndecl, 1, x);
8038 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8043 return NULL_TREE;
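/* Editorial illustration (not part of the original source): with
   -funsafe-math-optimizations,
       log (exp (x))    -> x          log2 (exp2 (x)) -> x
       log (sqrt (x))   -> 0.5 * log (x)
       log (pow (x, y)) -> y * log (x).  */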
8046 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8047 NULL_TREE if no simplification can be made. */
8049 static tree
8050 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8052 tree res, narg0, narg1;
8054 if (!validate_arg (arg0, REAL_TYPE)
8055 || !validate_arg (arg1, REAL_TYPE))
8056 return NULL_TREE;
8058 /* Calculate the result when the argument is a constant. */
8059 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8060 return res;
8062 /* If either argument to hypot has a negate or abs, strip that off.
8063 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8064 narg0 = fold_strip_sign_ops (arg0);
8065 narg1 = fold_strip_sign_ops (arg1);
8066 if (narg0 || narg1)
8068 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8069 narg1 ? narg1 : arg1);
8072 /* If either argument is zero, hypot is fabs of the other. */
8073 if (real_zerop (arg0))
8074 return fold_build1 (ABS_EXPR, type, arg1);
8075 else if (real_zerop (arg1))
8076 return fold_build1 (ABS_EXPR, type, arg0);
8078 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8079 if (flag_unsafe_math_optimizations
8080 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8082 const REAL_VALUE_TYPE sqrt2_trunc
8083 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
8084 return fold_build2 (MULT_EXPR, type,
8085 fold_build1 (ABS_EXPR, type, arg0),
8086 build_real (type, sqrt2_trunc));
8089 return NULL_TREE;
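/* Editorial illustration (not part of the original source):
       hypot (-x, fabs (y)) -> hypot (x, y)
       hypot (x, 0.0)       -> fabs (x)
   and with -funsafe-math-optimizations,
       hypot (x, x)         -> fabs (x) * sqrt (2).  */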
8093 /* Fold a builtin function call to pow, powf, or powl. Return
8094 NULL_TREE if no simplification can be made. */
8095 static tree
8096 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8098 tree res;
8100 if (!validate_arg (arg0, REAL_TYPE)
8101 || !validate_arg (arg1, REAL_TYPE))
8102 return NULL_TREE;
8104 /* Calculate the result when the argument is a constant. */
8105 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8106 return res;
8108 /* Optimize pow(1.0,y) = 1.0. */
8109 if (real_onep (arg0))
8110 return omit_one_operand (type, build_real (type, dconst1), arg1);
8112 if (TREE_CODE (arg1) == REAL_CST
8113 && !TREE_OVERFLOW (arg1))
8115 REAL_VALUE_TYPE cint;
8116 REAL_VALUE_TYPE c;
8117 HOST_WIDE_INT n;
8119 c = TREE_REAL_CST (arg1);
8121 /* Optimize pow(x,0.0) = 1.0. */
8122 if (REAL_VALUES_EQUAL (c, dconst0))
8123 return omit_one_operand (type, build_real (type, dconst1),
8124 arg0);
8126 /* Optimize pow(x,1.0) = x. */
8127 if (REAL_VALUES_EQUAL (c, dconst1))
8128 return arg0;
8130 /* Optimize pow(x,-1.0) = 1.0/x. */
8131 if (REAL_VALUES_EQUAL (c, dconstm1))
8132 return fold_build2 (RDIV_EXPR, type,
8133 build_real (type, dconst1), arg0);
8135 /* Optimize pow(x,0.5) = sqrt(x). */
8136 if (flag_unsafe_math_optimizations
8137 && REAL_VALUES_EQUAL (c, dconsthalf))
8139 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8141 if (sqrtfn != NULL_TREE)
8142 return build_call_expr (sqrtfn, 1, arg0);
8145 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8146 if (flag_unsafe_math_optimizations)
8148 const REAL_VALUE_TYPE dconstroot
8149 = real_value_truncate (TYPE_MODE (type), dconstthird);
8151 if (REAL_VALUES_EQUAL (c, dconstroot))
8153 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8154 if (cbrtfn != NULL_TREE)
8155 return build_call_expr (cbrtfn, 1, arg0);
8159 /* Check for an integer exponent. */
8160 n = real_to_integer (&c);
8161 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8162 if (real_identical (&c, &cint))
8164 /* Attempt to evaluate pow at compile-time. */
8165 if (TREE_CODE (arg0) == REAL_CST
8166 && !TREE_OVERFLOW (arg0))
8168 REAL_VALUE_TYPE x;
8169 bool inexact;
8171 x = TREE_REAL_CST (arg0);
8172 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8173 if (flag_unsafe_math_optimizations || !inexact)
8174 return build_real (type, x);
8177 /* Strip sign ops from even integer powers. */
8178 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8180 tree narg0 = fold_strip_sign_ops (arg0);
8181 if (narg0)
8182 return build_call_expr (fndecl, 2, narg0, arg1);
8187 if (flag_unsafe_math_optimizations)
8189 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8191 /* Optimize pow(expN(x),y) = expN(x*y). */
8192 if (BUILTIN_EXPONENT_P (fcode))
8194 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8195 tree arg = CALL_EXPR_ARG (arg0, 0);
8196 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8197 return build_call_expr (expfn, 1, arg);
8200 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8201 if (BUILTIN_SQRT_P (fcode))
8203 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8204 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8205 build_real (type, dconsthalf));
8206 return build_call_expr (fndecl, 2, narg0, narg1);
8209 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8210 if (BUILTIN_CBRT_P (fcode))
8212 tree arg = CALL_EXPR_ARG (arg0, 0);
8213 if (tree_expr_nonnegative_p (arg))
8215 const REAL_VALUE_TYPE dconstroot
8216 = real_value_truncate (TYPE_MODE (type), dconstthird);
8217 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8218 build_real (type, dconstroot));
8219 return build_call_expr (fndecl, 2, arg, narg1);
8223 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8224 if (fcode == BUILT_IN_POW
8225 || fcode == BUILT_IN_POWF
8226 || fcode == BUILT_IN_POWL)
8228 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8229 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8230 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8231 return build_call_expr (fndecl, 2, arg00, narg1);
8235 return NULL_TREE;
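/* Editorial illustration (not part of the original source):
       pow (x, 0.0)  -> 1.0        pow (x, 1.0) -> x
       pow (x, -1.0) -> 1.0 / x
   and with -funsafe-math-optimizations,
       pow (x, 0.5)        -> sqrt (x)
       pow (pow (x, y), z) -> pow (x, y * z).  */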
8238 /* Fold a builtin function call to powi, powif, or powil with arguments ARG0 and ARG1.
8239 Return NULL_TREE if no simplification can be made. */
8240 static tree
8241 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8242 tree arg0, tree arg1, tree type)
8244 if (!validate_arg (arg0, REAL_TYPE)
8245 || !validate_arg (arg1, INTEGER_TYPE))
8246 return NULL_TREE;
8248 /* Optimize pow(1.0,y) = 1.0. */
8249 if (real_onep (arg0))
8250 return omit_one_operand (type, build_real (type, dconst1), arg1);
8252 if (host_integerp (arg1, 0))
8254 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8256 /* Evaluate powi at compile-time. */
8257 if (TREE_CODE (arg0) == REAL_CST
8258 && !TREE_OVERFLOW (arg0))
8260 REAL_VALUE_TYPE x;
8261 x = TREE_REAL_CST (arg0);
8262 real_powi (&x, TYPE_MODE (type), &x, c);
8263 return build_real (type, x);
8266 /* Optimize pow(x,0) = 1.0. */
8267 if (c == 0)
8268 return omit_one_operand (type, build_real (type, dconst1),
8269 arg0);
8271 /* Optimize pow(x,1) = x. */
8272 if (c == 1)
8273 return arg0;
8275 /* Optimize pow(x,-1) = 1.0/x. */
8276 if (c == -1)
8277 return fold_build2 (RDIV_EXPR, type,
8278 build_real (type, dconst1), arg0);
8281 return NULL_TREE;
8284 /* A subroutine of fold_builtin to fold the various exponent
8285 functions. Return NULL_TREE if no simplification can be made.
8286 FUNC is the corresponding MPFR exponent function. */
8288 static tree
8289 fold_builtin_exponent (tree fndecl, tree arg,
8290 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8292 if (validate_arg (arg, REAL_TYPE))
8294 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8295 tree res;
8297 /* Calculate the result when the argument is a constant. */
8298 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8299 return res;
8301 /* Optimize expN(logN(x)) = x. */
8302 if (flag_unsafe_math_optimizations)
8304 const enum built_in_function fcode = builtin_mathfn_code (arg);
8306 if ((func == mpfr_exp
8307 && (fcode == BUILT_IN_LOG
8308 || fcode == BUILT_IN_LOGF
8309 || fcode == BUILT_IN_LOGL))
8310 || (func == mpfr_exp2
8311 && (fcode == BUILT_IN_LOG2
8312 || fcode == BUILT_IN_LOG2F
8313 || fcode == BUILT_IN_LOG2L))
8314 || (func == mpfr_exp10
8315 && (fcode == BUILT_IN_LOG10
8316 || fcode == BUILT_IN_LOG10F
8317 || fcode == BUILT_IN_LOG10L)))
8318 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8322 return NULL_TREE;
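/* Editorial illustration (not part of the original source): constant
   arguments are evaluated with MPFR, and with -funsafe-math-optimizations
       exp (log (x)) -> x,  exp2 (log2 (x)) -> x,  exp10 (log10 (x)) -> x.  */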
8325 /* Return true if VAR is a VAR_DECL or a component thereof. */
8327 static bool
8328 var_decl_component_p (tree var)
8330 tree inner = var;
8331 while (handled_component_p (inner))
8332 inner = TREE_OPERAND (inner, 0);
8333 return SSA_VAR_P (inner);
8336 /* Fold function call to builtin memset. Return
8337 NULL_TREE if no simplification can be made. */
8339 static tree
8340 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8342 tree var, ret;
8343 unsigned HOST_WIDE_INT length, cval;
8345 if (! validate_arg (dest, POINTER_TYPE)
8346 || ! validate_arg (c, INTEGER_TYPE)
8347 || ! validate_arg (len, INTEGER_TYPE))
8348 return NULL_TREE;
8350 if (! host_integerp (len, 1))
8351 return NULL_TREE;
8353 /* If the LEN parameter is zero, return DEST. */
8354 if (integer_zerop (len))
8355 return omit_one_operand (type, dest, c);
8357 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8358 return NULL_TREE;
8360 var = dest;
8361 STRIP_NOPS (var);
8362 if (TREE_CODE (var) != ADDR_EXPR)
8363 return NULL_TREE;
8365 var = TREE_OPERAND (var, 0);
8366 if (TREE_THIS_VOLATILE (var))
8367 return NULL_TREE;
8369 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8370 && !POINTER_TYPE_P (TREE_TYPE (var)))
8371 return NULL_TREE;
8373 if (! var_decl_component_p (var))
8374 return NULL_TREE;
8376 length = tree_low_cst (len, 1);
8377 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8378 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8379 < (int) length)
8380 return NULL_TREE;
8382 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8383 return NULL_TREE;
8385 if (integer_zerop (c))
8386 cval = 0;
8387 else
8389 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8390 return NULL_TREE;
8392 cval = tree_low_cst (c, 1);
8393 cval &= 0xff;
8394 cval |= cval << 8;
8395 cval |= cval << 16;
8396 cval |= (cval << 31) << 1;
8399 ret = build_int_cst_type (TREE_TYPE (var), cval);
8400 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8401 if (ignore)
8402 return ret;
8404 return omit_one_operand (type, dest, ret);
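/* Editorial illustration (not part of the original source): provided the
   size, alignment and aliasing checks above pass, for an int I
       memset (&i, 0, sizeof i)    becomes   i = 0;
       memset (&i, 0xab, sizeof i) becomes a single store of the value
   whose every byte is 0xab (the call still yields &i when its result
   is used).  */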
8407 /* Fold function call to builtin bzero. Return
8408 NULL_TREE if no simplification can be made. */
8410 static tree
8411 fold_builtin_bzero (tree dest, tree size, bool ignore)
8413 if (! validate_arg (dest, POINTER_TYPE)
8414 || ! validate_arg (size, INTEGER_TYPE))
8415 return NULL_TREE;
8417 if (!ignore)
8418 return NULL_TREE;
8420 /* New argument list transforming bzero(ptr x, int y) to
8421 memset(ptr x, int 0, size_t y). This is done this way
8422 so that if it isn't expanded inline, we fall back to
8423 calling bzero instead of memset. */
8425 return fold_builtin_memset (dest, integer_zero_node,
8426 fold_convert (sizetype, size),
8427 void_type_node, ignore);
8430 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8431 NULL_TREE if no simplification can be made.
8432 If ENDP is 0, return DEST (like memcpy).
8433 If ENDP is 1, return DEST+LEN (like mempcpy).
8434 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8435 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8436 (memmove). */
8438 static tree
8439 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8441 tree destvar, srcvar, expr;
8443 if (! validate_arg (dest, POINTER_TYPE)
8444 || ! validate_arg (src, POINTER_TYPE)
8445 || ! validate_arg (len, INTEGER_TYPE))
8446 return NULL_TREE;
8448 /* If the LEN parameter is zero, return DEST. */
8449 if (integer_zerop (len))
8450 return omit_one_operand (type, dest, src);
8452 /* If SRC and DEST are the same (and not volatile), return
8453 DEST{,+LEN,+LEN-1}. */
8454 if (operand_equal_p (src, dest, 0))
8455 expr = len;
8456 else
8458 tree srctype, desttype;
8459 if (endp == 3)
8461 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8462 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8464 /* Both DEST and SRC must be pointer types.
8465 ??? This is what old code did. Is the testing for pointer types
8466 really mandatory?
8468 If either SRC is readonly or length is 1, we can use memcpy. */
8469 if (dest_align && src_align
8470 && (readonly_data_expr (src)
8471 || (host_integerp (len, 1)
8472 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8473 tree_low_cst (len, 1)))))
8475 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8476 if (!fn)
8477 return NULL_TREE;
8478 return build_call_expr (fn, 3, dest, src, len);
8480 return NULL_TREE;
8483 if (!host_integerp (len, 0))
8484 return NULL_TREE;
8485 /* FIXME:
8486 This logic loses for arguments like (type *)malloc (sizeof (type)),
8487 since we strip the casts of up to the VOID return value from malloc.
8488 Perhaps we ought to inherit the type from the non-VOID argument here? */
8489 STRIP_NOPS (src);
8490 STRIP_NOPS (dest);
8491 srctype = TREE_TYPE (TREE_TYPE (src));
8492 desttype = TREE_TYPE (TREE_TYPE (dest));
8493 if (!srctype || !desttype
8494 || !TYPE_SIZE_UNIT (srctype)
8495 || !TYPE_SIZE_UNIT (desttype)
8496 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8497 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8498 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8499 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8500 return NULL_TREE;
8502 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8503 < (int) TYPE_ALIGN (desttype)
8504 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8505 < (int) TYPE_ALIGN (srctype)))
8506 return NULL_TREE;
8508 if (!ignore)
8509 dest = builtin_save_expr (dest);
8511 srcvar = build_fold_indirect_ref (src);
8512 if (TREE_THIS_VOLATILE (srcvar))
8513 return NULL_TREE;
8514 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8515 return NULL_TREE;
8516 /* With memcpy, it is possible to bypass aliasing rules, so without
8517 this check e.g. execute/20060930-2.c would be misoptimized, because
8518 it uses a conflicting alias set to hold the argument for the memcpy call.
8519 This check is probably unnecessary with -fno-strict-aliasing.
8520 Similarly for destvar. See also PR29286. */
8521 if (!var_decl_component_p (srcvar)
8522 /* Accept: memcpy (*char_var, "test", 1); which simplifies
8523 to char_var='t'. */
8524 || is_gimple_min_invariant (srcvar)
8525 || readonly_data_expr (src))
8526 return NULL_TREE;
8528 destvar = build_fold_indirect_ref (dest);
8529 if (TREE_THIS_VOLATILE (destvar))
8530 return NULL_TREE;
8531 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8532 return NULL_TREE;
8533 if (!var_decl_component_p (destvar))
8534 return NULL_TREE;
8536 if (srctype == desttype
8537 || (gimple_in_ssa_p (cfun)
8538 && tree_ssa_useless_type_conversion_1 (desttype, srctype)))
8539 expr = srcvar;
8540 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8541 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8542 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8543 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8544 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8545 else
8546 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8547 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8550 if (ignore)
8551 return expr;
8553 if (endp == 0 || endp == 3)
8554 return omit_one_operand (type, dest, expr);
8556 if (expr == len)
8557 expr = NULL_TREE;
8559 if (endp == 2)
8560 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8561 ssize_int (1));
8563 len = fold_convert (TREE_TYPE (dest), len);
8564 dest = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
8565 dest = fold_convert (type, dest);
8566 if (expr)
8567 dest = omit_one_operand (type, dest, expr);
8568 return dest;
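/* Editorial illustration (not part of the original source): assuming the
   type, size and alignment checks above pass, for same-typed variables
       memcpy (&a, &b, sizeof a)   becomes   a = b;
       memcpy (d, d, n)            becomes   d;
   memmove with a read-only source (or length 1) is turned into memcpy,
   and mempcpy additionally yields DEST + LEN.  */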
8571 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8572 If LEN is not NULL, it represents the length of the string to be
8573 copied. Return NULL_TREE if no simplification can be made. */
8575 tree
8576 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8578 tree fn;
8580 if (!validate_arg (dest, POINTER_TYPE)
8581 || !validate_arg (src, POINTER_TYPE))
8582 return NULL_TREE;
8584 /* If SRC and DEST are the same (and not volatile), return DEST. */
8585 if (operand_equal_p (src, dest, 0))
8586 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8588 if (optimize_size)
8589 return NULL_TREE;
8591 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8592 if (!fn)
8593 return NULL_TREE;
8595 if (!len)
8597 len = c_strlen (src, 1);
8598 if (! len || TREE_SIDE_EFFECTS (len))
8599 return NULL_TREE;
8602 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8603 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8604 build_call_expr (fn, 3, dest, src, len));
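/* Editorial illustration (not part of the original source): when not
   optimizing for size and the source length is known,
       strcpy (buf, "hi")   becomes   memcpy (buf, "hi", 3)
   (the string length plus the terminating NUL), and strcpy (p, p)
   folds to p.  */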
8607 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8608 If SLEN is not NULL, it represents the length of the source string.
8609 Return NULL_TREE if no simplification can be made. */
8611 tree
8612 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8614 tree fn;
8616 if (!validate_arg (dest, POINTER_TYPE)
8617 || !validate_arg (src, POINTER_TYPE)
8618 || !validate_arg (len, INTEGER_TYPE))
8619 return NULL_TREE;
8621 /* If the LEN parameter is zero, return DEST. */
8622 if (integer_zerop (len))
8623 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8625 /* We can't compare slen with len as constants below if len is not a
8626 constant. */
8627 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8628 return NULL_TREE;
8630 if (!slen)
8631 slen = c_strlen (src, 1);
8633 /* Now, we must be passed a constant src ptr parameter. */
8634 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8635 return NULL_TREE;
8637 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8639 /* We do not support simplification of this case, though we do
8640 support it when expanding trees into RTL. */
8641 /* FIXME: generate a call to __builtin_memset. */
8642 if (tree_int_cst_lt (slen, len))
8643 return NULL_TREE;
8645 /* OK transform into builtin memcpy. */
8646 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8647 if (!fn)
8648 return NULL_TREE;
8649 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8650 build_call_expr (fn, 3, dest, src, len));
8653 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8654 Return NULL_TREE if no simplification can be made. */
8656 static tree
8657 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8659 const char *p1, *p2;
8661 if (!validate_arg (arg1, POINTER_TYPE)
8662 || !validate_arg (arg2, POINTER_TYPE)
8663 || !validate_arg (len, INTEGER_TYPE))
8664 return NULL_TREE;
8666 /* If the LEN parameter is zero, return zero. */
8667 if (integer_zerop (len))
8668 return omit_two_operands (integer_type_node, integer_zero_node,
8669 arg1, arg2);
8671 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8672 if (operand_equal_p (arg1, arg2, 0))
8673 return omit_one_operand (integer_type_node, integer_zero_node, len);
8675 p1 = c_getstr (arg1);
8676 p2 = c_getstr (arg2);
8678 /* If all arguments are constant, and the value of len is not greater
8679 than the lengths of arg1 and arg2, evaluate at compile-time. */
8680 if (host_integerp (len, 1) && p1 && p2
8681 && compare_tree_int (len, strlen (p1) + 1) <= 0
8682 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8684 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8686 if (r > 0)
8687 return integer_one_node;
8688 else if (r < 0)
8689 return integer_minus_one_node;
8690 else
8691 return integer_zero_node;
8694 /* If the len parameter is one, return an expression corresponding to
8695 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8696 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8698 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8699 tree cst_uchar_ptr_node
8700 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8702 tree ind1 = fold_convert (integer_type_node,
8703 build1 (INDIRECT_REF, cst_uchar_node,
8704 fold_convert (cst_uchar_ptr_node,
8705 arg1)));
8706 tree ind2 = fold_convert (integer_type_node,
8707 build1 (INDIRECT_REF, cst_uchar_node,
8708 fold_convert (cst_uchar_ptr_node,
8709 arg2)));
8710 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8713 return NULL_TREE;
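/* Editorial illustration (not part of the original source):
       memcmp (p, p, n)         -> 0
       memcmp ("abc", "abd", 3) -> -1   (evaluated at compile time)
       memcmp (a, b, 1)         -> *(const unsigned char *) a
                                   - *(const unsigned char *) b.  */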
8716 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8717 Return NULL_TREE if no simplification can be made. */
8719 static tree
8720 fold_builtin_strcmp (tree arg1, tree arg2)
8722 const char *p1, *p2;
8724 if (!validate_arg (arg1, POINTER_TYPE)
8725 || !validate_arg (arg2, POINTER_TYPE))
8726 return NULL_TREE;
8728 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8729 if (operand_equal_p (arg1, arg2, 0))
8730 return integer_zero_node;
8732 p1 = c_getstr (arg1);
8733 p2 = c_getstr (arg2);
8735 if (p1 && p2)
8737 const int i = strcmp (p1, p2);
8738 if (i < 0)
8739 return integer_minus_one_node;
8740 else if (i > 0)
8741 return integer_one_node;
8742 else
8743 return integer_zero_node;
8746 /* If the second arg is "", return *(const unsigned char*)arg1. */
8747 if (p2 && *p2 == '\0')
8749 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8750 tree cst_uchar_ptr_node
8751 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8753 return fold_convert (integer_type_node,
8754 build1 (INDIRECT_REF, cst_uchar_node,
8755 fold_convert (cst_uchar_ptr_node,
8756 arg1)));
8759 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8760 if (p1 && *p1 == '\0')
8762 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8763 tree cst_uchar_ptr_node
8764 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8766 tree temp = fold_convert (integer_type_node,
8767 build1 (INDIRECT_REF, cst_uchar_node,
8768 fold_convert (cst_uchar_ptr_node,
8769 arg2)));
8770 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8773 return NULL_TREE;
8776 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8777 Return NULL_TREE if no simplification can be made. */
8779 static tree
8780 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
8782 const char *p1, *p2;
8784 if (!validate_arg (arg1, POINTER_TYPE)
8785 || !validate_arg (arg2, POINTER_TYPE)
8786 || !validate_arg (len, INTEGER_TYPE))
8787 return NULL_TREE;
8789 /* If the LEN parameter is zero, return zero. */
8790 if (integer_zerop (len))
8791 return omit_two_operands (integer_type_node, integer_zero_node,
8792 arg1, arg2);
8794 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8795 if (operand_equal_p (arg1, arg2, 0))
8796 return omit_one_operand (integer_type_node, integer_zero_node, len);
8798 p1 = c_getstr (arg1);
8799 p2 = c_getstr (arg2);
8801 if (host_integerp (len, 1) && p1 && p2)
8803 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8804 if (i > 0)
8805 return integer_one_node;
8806 else if (i < 0)
8807 return integer_minus_one_node;
8808 else
8809 return integer_zero_node;
8812 /* If the second arg is "", and the length is greater than zero,
8813 return *(const unsigned char*)arg1. */
8814 if (p2 && *p2 == '\0'
8815 && TREE_CODE (len) == INTEGER_CST
8816 && tree_int_cst_sgn (len) == 1)
8818 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8819 tree cst_uchar_ptr_node
8820 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8822 return fold_convert (integer_type_node,
8823 build1 (INDIRECT_REF, cst_uchar_node,
8824 fold_convert (cst_uchar_ptr_node,
8825 arg1)));
8828 /* If the first arg is "", and the length is greater than zero,
8829 return -*(const unsigned char*)arg2. */
8830 if (p1 && *p1 == '\0'
8831 && TREE_CODE (len) == INTEGER_CST
8832 && tree_int_cst_sgn (len) == 1)
8834 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8835 tree cst_uchar_ptr_node
8836 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8838 tree temp = fold_convert (integer_type_node,
8839 build1 (INDIRECT_REF, cst_uchar_node,
8840 fold_convert (cst_uchar_ptr_node,
8841 arg2)));
8842 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8845 /* If the len parameter is one, return an expression corresponding to
8846 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8847 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8849 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8850 tree cst_uchar_ptr_node
8851 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8853 tree ind1 = fold_convert (integer_type_node,
8854 build1 (INDIRECT_REF, cst_uchar_node,
8855 fold_convert (cst_uchar_ptr_node,
8856 arg1)));
8857 tree ind2 = fold_convert (integer_type_node,
8858 build1 (INDIRECT_REF, cst_uchar_node,
8859 fold_convert (cst_uchar_ptr_node,
8860 arg2)));
8861 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8864 return NULL_TREE;
8867 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8868 ARG. Return NULL_TREE if no simplification can be made. */
8870 static tree
8871 fold_builtin_signbit (tree arg, tree type)
8873 tree temp;
8875 if (!validate_arg (arg, REAL_TYPE))
8876 return NULL_TREE;
8878 /* If ARG is a compile-time constant, determine the result. */
8879 if (TREE_CODE (arg) == REAL_CST
8880 && !TREE_OVERFLOW (arg))
8882 REAL_VALUE_TYPE c;
8884 c = TREE_REAL_CST (arg);
8885 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8886 return fold_convert (type, temp);
8889 /* If ARG is non-negative, the result is always zero. */
8890 if (tree_expr_nonnegative_p (arg))
8891 return omit_one_operand (type, integer_zero_node, arg);
8893 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8894 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8895 return fold_build2 (LT_EXPR, type, arg,
8896 build_real (TREE_TYPE (arg), dconst0));
8898 return NULL_TREE;
8901 /* Fold function call to builtin copysign, copysignf or copysignl with
8902 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8903 be made. */
8905 static tree
8906 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
8908 tree tem;
8910 if (!validate_arg (arg1, REAL_TYPE)
8911 || !validate_arg (arg2, REAL_TYPE))
8912 return NULL_TREE;
8914 /* copysign(X,X) is X. */
8915 if (operand_equal_p (arg1, arg2, 0))
8916 return fold_convert (type, arg1);
8918 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8919 if (TREE_CODE (arg1) == REAL_CST
8920 && TREE_CODE (arg2) == REAL_CST
8921 && !TREE_OVERFLOW (arg1)
8922 && !TREE_OVERFLOW (arg2))
8924 REAL_VALUE_TYPE c1, c2;
8926 c1 = TREE_REAL_CST (arg1);
8927 c2 = TREE_REAL_CST (arg2);
8928 /* c1.sign := c2.sign. */
8929 real_copysign (&c1, &c2);
8930 return build_real (type, c1);
8933 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8934 Remember to evaluate Y for side-effects. */
8935 if (tree_expr_nonnegative_p (arg2))
8936 return omit_one_operand (type,
8937 fold_build1 (ABS_EXPR, type, arg1),
8938 arg2);
8940 /* Strip sign changing operations for the first argument. */
8941 tem = fold_strip_sign_ops (arg1);
8942 if (tem)
8943 return build_call_expr (fndecl, 2, tem, arg2);
8945 return NULL_TREE;
8948 /* Fold a call to builtin isascii with argument ARG. */
8950 static tree
8951 fold_builtin_isascii (tree arg)
8953 if (!validate_arg (arg, INTEGER_TYPE))
8954 return NULL_TREE;
8955 else
8957 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8958 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
8959 build_int_cst (NULL_TREE,
8960 ~ (unsigned HOST_WIDE_INT) 0x7f));
8961 return fold_build2 (EQ_EXPR, integer_type_node,
8962 arg, integer_zero_node);
8966 /* Fold a call to builtin toascii with argument ARG. */
8968 static tree
8969 fold_builtin_toascii (tree arg)
8971 if (!validate_arg (arg, INTEGER_TYPE))
8972 return NULL_TREE;
8974 /* Transform toascii(c) -> (c & 0x7f). */
8975 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8976 build_int_cst (NULL_TREE, 0x7f));
8979 /* Fold a call to builtin isdigit with argument ARG. */
8981 static tree
8982 fold_builtin_isdigit (tree arg)
8984 if (!validate_arg (arg, INTEGER_TYPE))
8985 return NULL_TREE;
8986 else
8988 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8989 /* According to the C standard, isdigit is unaffected by locale.
8990 However, it definitely is affected by the target character set. */
8991 unsigned HOST_WIDE_INT target_digit0
8992 = lang_hooks.to_target_charset ('0');
8994 if (target_digit0 == 0)
8995 return NULL_TREE;
8997 arg = fold_convert (unsigned_type_node, arg);
8998 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
8999 build_int_cst (unsigned_type_node, target_digit0));
9000 return fold_build2 (LE_EXPR, integer_type_node, arg,
9001 build_int_cst (unsigned_type_node, 9));
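/* Editorial illustration (not part of the original source): the three
   folds above produce the branch-free forms
       isascii (c) -> (c & ~0x7f) == 0
       toascii (c) -> c & 0x7f
       isdigit (c) -> (unsigned) c - '0' <= 9
   using the target character set's value of '0'.  */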
9005 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9007 static tree
9008 fold_builtin_fabs (tree arg, tree type)
9010 if (!validate_arg (arg, REAL_TYPE))
9011 return NULL_TREE;
9013 arg = fold_convert (type, arg);
9014 if (TREE_CODE (arg) == REAL_CST)
9015 return fold_abs_const (arg, type);
9016 return fold_build1 (ABS_EXPR, type, arg);
9019 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9021 static tree
9022 fold_builtin_abs (tree arg, tree type)
9024 if (!validate_arg (arg, INTEGER_TYPE))
9025 return NULL_TREE;
9027 arg = fold_convert (type, arg);
9028 if (TREE_CODE (arg) == INTEGER_CST)
9029 return fold_abs_const (arg, type);
9030 return fold_build1 (ABS_EXPR, type, arg);
9033 /* Fold a call to builtin fmin or fmax. */
9035 static tree
9036 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9038 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9040 /* Calculate the result when the argument is a constant. */
9041 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9043 if (res)
9044 return res;
9046 /* If either argument is NaN, return the other one. Avoid the
9047 transformation if we get (and honor) a signalling NaN. Using
9048 omit_one_operand() ensures we create a non-lvalue. */
9049 if (TREE_CODE (arg0) == REAL_CST
9050 && real_isnan (&TREE_REAL_CST (arg0))
9051 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9052 || ! TREE_REAL_CST (arg0).signalling))
9053 return omit_one_operand (type, arg1, arg0);
9054 if (TREE_CODE (arg1) == REAL_CST
9055 && real_isnan (&TREE_REAL_CST (arg1))
9056 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9057 || ! TREE_REAL_CST (arg1).signalling))
9058 return omit_one_operand (type, arg0, arg1);
9060 /* Transform fmin/fmax(x,x) -> x. */
9061 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9062 return omit_one_operand (type, arg0, arg1);
9064 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9065 functions to return the numeric arg if the other one is NaN.
9066 These tree codes don't honor that, so only transform if
9067 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9068 handled, so we don't have to worry about it either. */
9069 if (flag_finite_math_only)
9070 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9071 fold_convert (type, arg0),
9072 fold_convert (type, arg1));
9074 return NULL_TREE;
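/* Editorial illustration (not part of the original source):
       fmin (x, x)                  -> x
       fmax (x, __builtin_nan ("")) -> x   (a quiet NaN argument is dropped)
   and with -ffinite-math-only, fmax/fmin become MAX_EXPR/MIN_EXPR.  */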
9077 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9079 static tree
9080 fold_builtin_carg (tree arg, tree type)
9082 if (validate_arg (arg, COMPLEX_TYPE))
9084 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9086 if (atan2_fn)
9088 tree new_arg = builtin_save_expr (arg);
9089 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9090 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9091 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9095 return NULL_TREE;
9098 /* Fold a call to builtin logb/ilogb. */
9100 static tree
9101 fold_builtin_logb (tree arg, tree rettype)
9103 if (! validate_arg (arg, REAL_TYPE))
9104 return NULL_TREE;
9106 STRIP_NOPS (arg);
9108 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9110 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9112 switch (value->cl)
9114 case rvc_nan:
9115 case rvc_inf:
9116 /* If arg is Inf or NaN and we're logb, return it. */
9117 if (TREE_CODE (rettype) == REAL_TYPE)
9118 return fold_convert (rettype, arg);
9119 /* Fall through... */
9120 case rvc_zero:
9121 /* Zero may set errno and/or raise an exception for logb, also
9122 for ilogb we don't know FP_ILOGB0. */
9123 return NULL_TREE;
9124 case rvc_normal:
9125 /* For normal numbers, proceed iff radix == 2. In GCC,
9126 normalized significands are in the range [0.5, 1.0). We
9127 want the exponent as if they were [1.0, 2.0) so get the
9128 exponent and subtract 1. */
9129 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9130 return fold_convert (rettype, build_int_cst (NULL_TREE,
9131 REAL_EXP (value)-1));
9132 break;
9136 return NULL_TREE;
9139 /* Fold a call to builtin significand, if radix == 2. */
9141 static tree
9142 fold_builtin_significand (tree arg, tree rettype)
9144 if (! validate_arg (arg, REAL_TYPE))
9145 return NULL_TREE;
9147 STRIP_NOPS (arg);
9149 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9151 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9153 switch (value->cl)
9155 case rvc_zero:
9156 case rvc_nan:
9157 case rvc_inf:
9158 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9159 return fold_convert (rettype, arg);
9160 case rvc_normal:
9161 /* For normal numbers, proceed iff radix == 2. */
9162 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9164 REAL_VALUE_TYPE result = *value;
9165 /* In GCC, normalized significands are in the range [0.5,
9166 1.0). We want them to be [1.0, 2.0) so set the
9167 exponent to 1. */
9168 SET_REAL_EXP (&result, 1);
9169 return build_real (rettype, result);
9171 break;
9175 return NULL_TREE;
9178 /* Fold a call to builtin frexp. We can assume the base is 2. */
9180 static tree
9181 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9183 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9184 return NULL_TREE;
9186 STRIP_NOPS (arg0);
9188 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9189 return NULL_TREE;
9191 arg1 = build_fold_indirect_ref (arg1);
9193 /* Proceed if a valid pointer type was passed in. */
9194 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9196 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9197 tree frac, exp;
9199 switch (value->cl)
9201 case rvc_zero:
9202 /* For +-0, return (*exp = 0, +-0). */
9203 exp = integer_zero_node;
9204 frac = arg0;
9205 break;
9206 case rvc_nan:
9207 case rvc_inf:
9208 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9209 return omit_one_operand (rettype, arg0, arg1);
9210 case rvc_normal:
9212 /* Since the frexp function always expects base 2, and in
9213 GCC normalized significands are already in the range
9214 [0.5, 1.0), we have exactly what frexp wants. */
9215 REAL_VALUE_TYPE frac_rvt = *value;
9216 SET_REAL_EXP (&frac_rvt, 0);
9217 frac = build_real (rettype, frac_rvt);
9218 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9220 break;
9221 default:
9222 gcc_unreachable ();
9225 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9226 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9227 TREE_SIDE_EFFECTS (arg1) = 1;
9228 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9231 return NULL_TREE;
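
A minimal sketch (not part of builtins.c) of the constant folding performed above: frexp on a normal constant yields the GCC-internal [0.5, 1.0) significand together with its exponent.

#include <math.h>
#include <stdio.h>

int main (void)
{
  int e;
  /* 8.0 == 0.5 * 2**4, so the fold above produces (e = 4, result 0.5).  */
  double f = frexp (8.0, &e);
  printf ("%g %d\n", f, e);   /* 0.5 4 */
  return 0;
}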
9234 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9235 then we can assume the base is two. If it's false, then we have to
9236 check the mode of the TYPE parameter in certain cases. */
9238 static tree
9239 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9241 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9243 STRIP_NOPS (arg0);
9244 STRIP_NOPS (arg1);
9246 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9247 if (real_zerop (arg0) || integer_zerop (arg1)
9248 || (TREE_CODE (arg0) == REAL_CST
9249 && (real_isnan (&TREE_REAL_CST (arg0))
9250 || real_isinf (&TREE_REAL_CST (arg0)))))
9251 return omit_one_operand (type, arg0, arg1);
9253 /* If both arguments are constant, then try to evaluate it. */
9254 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9255 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9256 && host_integerp (arg1, 0))
9258 /* Bound the maximum adjustment to twice the range of the
9259 mode's valid exponents. Use abs to ensure the range is
9260 positive as a sanity check. */
9261 const long max_exp_adj = 2 *
9262 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9263 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9265 /* Get the user-requested adjustment. */
9266 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9268 /* The requested adjustment must be inside this range. This
9269 is a preliminary cap to avoid things like overflow, we
9270 may still fail to compute the result for other reasons. */
9271 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9273 REAL_VALUE_TYPE initial_result;
9275 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9277 /* Ensure we didn't overflow. */
9278 if (! real_isinf (&initial_result))
9280 const REAL_VALUE_TYPE trunc_result
9281 = real_value_truncate (TYPE_MODE (type), initial_result);
9283 /* Only proceed if the target mode can hold the
9284 resulting value. */
9285 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9286 return build_real (type, trunc_result);
9292 return NULL_TREE;
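
A small sketch (not part of builtins.c) of the ldexp/scalbn folding above; it assumes a radix-2 target so scalbn is treated the same way as ldexp.

#include <math.h>
#include <stdio.h>

int main (void)
{
  double x = 3.25;
  /* Both operands constant and the result representable:
     folded to the constant 12.0 at compile time.  */
  printf ("%g\n", ldexp (1.5, 3));    /* 12 */
  /* A zero exponent argument: ldexp (x, 0) is folded to x itself.  */
  printf ("%g\n", ldexp (x, 0));      /* 3.25 */
  /* scalbn gets the same treatment once the radix is known to be 2.  */
  printf ("%g\n", scalbn (1.5, 3));   /* 12 */
  return 0;
}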
9295 /* Fold a call to builtin modf. */
9297 static tree
9298 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9300 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9301 return NULL_TREE;
9303 STRIP_NOPS (arg0);
9305 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9306 return NULL_TREE;
9308 arg1 = build_fold_indirect_ref (arg1);
9310 /* Proceed if a valid pointer type was passed in. */
9311 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9313 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9314 REAL_VALUE_TYPE trunc, frac;
9316 switch (value->cl)
9318 case rvc_nan:
9319 case rvc_zero:
9320 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9321 trunc = frac = *value;
9322 break;
9323 case rvc_inf:
9324 /* For +-Inf, return (*arg1 = arg0, +-0). */
9325 frac = dconst0;
9326 frac.sign = value->sign;
9327 trunc = *value;
9328 break;
9329 case rvc_normal:
9330 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9331 real_trunc (&trunc, VOIDmode, value);
9332 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9333 /* If the original number was negative and already
9334 integral, then the fractional part is -0.0. */
9335 if (value->sign && frac.cl == rvc_zero)
9336 frac.sign = value->sign;
9337 break;
9340 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9341 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9342 build_real (rettype, trunc));
9343 TREE_SIDE_EFFECTS (arg1) = 1;
9344 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9345 build_real (rettype, frac));
9348 return NULL_TREE;
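
A minimal sketch (not part of builtins.c) of the modf constant folding above, including the negative-integral case noted in the comments.

#include <math.h>
#include <stdio.h>

int main (void)
{
  double ip;
  /* Normal constant: folded to (ip = 2.0, result 0.5).  */
  double f = modf (2.5, &ip);
  printf ("%g %g\n", ip, f);    /* 2 0.5 */
  /* Negative and already integral: the fractional part is -0.0.  */
  f = modf (-3.0, &ip);
  printf ("%g %g\n", ip, f);    /* -3 -0 */
  return 0;
}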
9351 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9352 ARG is the argument for the call. */
9354 static tree
9355 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9357 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9358 REAL_VALUE_TYPE r;
9360 if (!validate_arg (arg, REAL_TYPE))
9362 error ("non-floating-point argument to function %qs",
9363 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9364 return error_mark_node;
9367 switch (builtin_index)
9369 case BUILT_IN_ISINF:
9370 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9371 return omit_one_operand (type, integer_zero_node, arg);
9373 if (TREE_CODE (arg) == REAL_CST)
9375 r = TREE_REAL_CST (arg);
9376 if (real_isinf (&r))
9377 return real_compare (GT_EXPR, &r, &dconst0)
9378 ? integer_one_node : integer_minus_one_node;
9379 else
9380 return integer_zero_node;
9383 return NULL_TREE;
9385 case BUILT_IN_FINITE:
9386 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9387 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9388 return omit_one_operand (type, integer_one_node, arg);
9390 if (TREE_CODE (arg) == REAL_CST)
9392 r = TREE_REAL_CST (arg);
9393 return real_isinf (&r) || real_isnan (&r)
9394 ? integer_zero_node : integer_one_node;
9397 return NULL_TREE;
9399 case BUILT_IN_ISNAN:
9400 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9401 return omit_one_operand (type, integer_zero_node, arg);
9403 if (TREE_CODE (arg) == REAL_CST)
9405 r = TREE_REAL_CST (arg);
9406 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9409 arg = builtin_save_expr (arg);
9410 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9412 default:
9413 gcc_unreachable ();
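
A user-level sketch (not part of builtins.c) of what the classification folder above does; the 1/-1 result of __builtin_isinf on a signed constant infinity reflects this version of the folder.

#include <math.h>
#include <stdio.h>

int main (void)
{
  double q = nan ("");
  /* With a non-constant argument, __builtin_isnan folds to an unordered
     self-comparison, so it agrees with q != q.  */
  printf ("%d %d\n", __builtin_isnan (q), q != q);   /* 1 1 */
  /* A constant infinity folds to 1 (or -1 for -HUGE_VAL here).  */
  printf ("%d\n", __builtin_isinf (HUGE_VAL));       /* 1 */
  /* finite is 0 for Inf/NaN arguments and 1 for everything else.  */
  printf ("%d\n", __builtin_finite (1.0));           /* 1 */
  return 0;
}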
9417 /* Fold a call to an unordered comparison function such as
9418 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9419 being called and ARG0 and ARG1 are the arguments for the call.
9420 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9421 the opposite of the desired result. UNORDERED_CODE is used
9422 for modes that can hold NaNs and ORDERED_CODE is used for
9423 the rest. */
9425 static tree
9426 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9427 enum tree_code unordered_code,
9428 enum tree_code ordered_code)
9430 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9431 enum tree_code code;
9432 tree type0, type1;
9433 enum tree_code code0, code1;
9434 tree cmp_type = NULL_TREE;
9436 type0 = TREE_TYPE (arg0);
9437 type1 = TREE_TYPE (arg1);
9439 code0 = TREE_CODE (type0);
9440 code1 = TREE_CODE (type1);
9442 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9443 /* Choose the wider of two real types. */
9444 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9445 ? type0 : type1;
9446 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9447 cmp_type = type0;
9448 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9449 cmp_type = type1;
9450 else
9452 error ("non-floating-point argument to function %qs",
9453 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9454 return error_mark_node;
9457 arg0 = fold_convert (cmp_type, arg0);
9458 arg1 = fold_convert (cmp_type, arg1);
9460 if (unordered_code == UNORDERED_EXPR)
9462 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9463 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9464 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9467 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9468 : ordered_code;
9469 return fold_build1 (TRUTH_NOT_EXPR, type,
9470 fold_build2 (code, type, arg0, arg1));
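
A small sketch (not part of builtins.c) showing the user-visible effect of the unordered-comparison folding above: a NaN operand makes the macros false without raising the invalid exception.

#include <math.h>
#include <stdio.h>

int main (void)
{
  double x = 1.0, y = nan ("");
  /* isgreater (a, b) is folded to !(a UNLE b), so it is 0 when either
     operand is a NaN.  */
  printf ("%d %d\n", isgreater (2.0, 1.0), isgreater (x, y));  /* 1 0 */
  /* isunordered folds to a plain UNORDERED comparison.  */
  printf ("%d\n", isunordered (x, y));                         /* 1 */
  return 0;
}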
9473 /* Fold a call to built-in function FNDECL with 0 arguments.
9474 IGNORE is true if the result of the function call is ignored. This
9475 function returns NULL_TREE if no simplification was possible. */
9477 static tree
9478 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9480 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9481 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9482 switch (fcode)
9484 CASE_FLT_FN (BUILT_IN_INF):
9485 case BUILT_IN_INFD32:
9486 case BUILT_IN_INFD64:
9487 case BUILT_IN_INFD128:
9488 return fold_builtin_inf (type, true);
9490 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9491 return fold_builtin_inf (type, false);
9493 case BUILT_IN_CLASSIFY_TYPE:
9494 return fold_builtin_classify_type (NULL_TREE);
9496 default:
9497 break;
9499 return NULL_TREE;
9502 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9503 IGNORE is true if the result of the function call is ignored. This
9504 function returns NULL_TREE if no simplification was possible. */
9506 static tree
9507 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9509 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9510 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9511 switch (fcode)
9514 case BUILT_IN_CONSTANT_P:
9516 tree val = fold_builtin_constant_p (arg0);
9518 /* Gimplification will pull the CALL_EXPR for the builtin out of
9519 an if condition. When not optimizing, we'll not CSE it back.
9520 To avoid link-error regressions, return false now. */
9521 if (!val && !optimize)
9522 val = integer_zero_node;
9524 return val;
9527 case BUILT_IN_CLASSIFY_TYPE:
9528 return fold_builtin_classify_type (arg0);
9530 case BUILT_IN_STRLEN:
9531 return fold_builtin_strlen (arg0);
9533 CASE_FLT_FN (BUILT_IN_FABS):
9534 return fold_builtin_fabs (arg0, type);
9536 case BUILT_IN_ABS:
9537 case BUILT_IN_LABS:
9538 case BUILT_IN_LLABS:
9539 case BUILT_IN_IMAXABS:
9540 return fold_builtin_abs (arg0, type);
9542 CASE_FLT_FN (BUILT_IN_CONJ):
9543 if (validate_arg (arg0, COMPLEX_TYPE))
9544 return fold_build1 (CONJ_EXPR, type, arg0);
9545 break;
9547 CASE_FLT_FN (BUILT_IN_CREAL):
9548 if (validate_arg (arg0, COMPLEX_TYPE))
9549 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
9550 break;
9552 CASE_FLT_FN (BUILT_IN_CIMAG):
9553 if (validate_arg (arg0, COMPLEX_TYPE))
9554 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9555 break;
9557 CASE_FLT_FN (BUILT_IN_CCOS):
9558 CASE_FLT_FN (BUILT_IN_CCOSH):
9559 /* These functions are "even", i.e. f(x) == f(-x). */
9560 if (validate_arg (arg0, COMPLEX_TYPE))
9562 tree narg = fold_strip_sign_ops (arg0);
9563 if (narg)
9564 return build_call_expr (fndecl, 1, narg);
9566 break;
9568 CASE_FLT_FN (BUILT_IN_CABS):
9569 return fold_builtin_cabs (arg0, type, fndecl);
9571 CASE_FLT_FN (BUILT_IN_CARG):
9572 return fold_builtin_carg (arg0, type);
9574 CASE_FLT_FN (BUILT_IN_SQRT):
9575 return fold_builtin_sqrt (arg0, type);
9577 CASE_FLT_FN (BUILT_IN_CBRT):
9578 return fold_builtin_cbrt (arg0, type);
9580 CASE_FLT_FN (BUILT_IN_ASIN):
9581 if (validate_arg (arg0, REAL_TYPE))
9582 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9583 &dconstm1, &dconst1, true);
9584 break;
9586 CASE_FLT_FN (BUILT_IN_ACOS):
9587 if (validate_arg (arg0, REAL_TYPE))
9588 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9589 &dconstm1, &dconst1, true);
9590 break;
9592 CASE_FLT_FN (BUILT_IN_ATAN):
9593 if (validate_arg (arg0, REAL_TYPE))
9594 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9595 break;
9597 CASE_FLT_FN (BUILT_IN_ASINH):
9598 if (validate_arg (arg0, REAL_TYPE))
9599 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9600 break;
9602 CASE_FLT_FN (BUILT_IN_ACOSH):
9603 if (validate_arg (arg0, REAL_TYPE))
9604 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9605 &dconst1, NULL, true);
9606 break;
9608 CASE_FLT_FN (BUILT_IN_ATANH):
9609 if (validate_arg (arg0, REAL_TYPE))
9610 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9611 &dconstm1, &dconst1, false);
9612 break;
9614 CASE_FLT_FN (BUILT_IN_SIN):
9615 if (validate_arg (arg0, REAL_TYPE))
9616 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9617 break;
9619 CASE_FLT_FN (BUILT_IN_COS):
9620 return fold_builtin_cos (arg0, type, fndecl);
9621 break;
9623 CASE_FLT_FN (BUILT_IN_TAN):
9624 return fold_builtin_tan (arg0, type);
9626 CASE_FLT_FN (BUILT_IN_CEXP):
9627 return fold_builtin_cexp (arg0, type);
9629 CASE_FLT_FN (BUILT_IN_CEXPI):
9630 if (validate_arg (arg0, REAL_TYPE))
9631 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9632 break;
9634 CASE_FLT_FN (BUILT_IN_SINH):
9635 if (validate_arg (arg0, REAL_TYPE))
9636 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9637 break;
9639 CASE_FLT_FN (BUILT_IN_COSH):
9640 return fold_builtin_cosh (arg0, type, fndecl);
9642 CASE_FLT_FN (BUILT_IN_TANH):
9643 if (validate_arg (arg0, REAL_TYPE))
9644 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9645 break;
9647 CASE_FLT_FN (BUILT_IN_ERF):
9648 if (validate_arg (arg0, REAL_TYPE))
9649 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9650 break;
9652 CASE_FLT_FN (BUILT_IN_ERFC):
9653 if (validate_arg (arg0, REAL_TYPE))
9654 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9655 break;
9657 CASE_FLT_FN (BUILT_IN_TGAMMA):
9658 if (validate_arg (arg0, REAL_TYPE))
9659 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9660 break;
9662 CASE_FLT_FN (BUILT_IN_EXP):
9663 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9665 CASE_FLT_FN (BUILT_IN_EXP2):
9666 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9668 CASE_FLT_FN (BUILT_IN_EXP10):
9669 CASE_FLT_FN (BUILT_IN_POW10):
9670 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9672 CASE_FLT_FN (BUILT_IN_EXPM1):
9673 if (validate_arg (arg0, REAL_TYPE))
9674 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9675 break;
9677 CASE_FLT_FN (BUILT_IN_LOG):
9678 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9680 CASE_FLT_FN (BUILT_IN_LOG2):
9681 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9683 CASE_FLT_FN (BUILT_IN_LOG10):
9684 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9686 CASE_FLT_FN (BUILT_IN_LOG1P):
9687 if (validate_arg (arg0, REAL_TYPE))
9688 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9689 &dconstm1, NULL, false);
9690 break;
9692 CASE_FLT_FN (BUILT_IN_NAN):
9693 case BUILT_IN_NAND32:
9694 case BUILT_IN_NAND64:
9695 case BUILT_IN_NAND128:
9696 return fold_builtin_nan (arg0, type, true);
9698 CASE_FLT_FN (BUILT_IN_NANS):
9699 return fold_builtin_nan (arg0, type, false);
9701 CASE_FLT_FN (BUILT_IN_FLOOR):
9702 return fold_builtin_floor (fndecl, arg0);
9704 CASE_FLT_FN (BUILT_IN_CEIL):
9705 return fold_builtin_ceil (fndecl, arg0);
9707 CASE_FLT_FN (BUILT_IN_TRUNC):
9708 return fold_builtin_trunc (fndecl, arg0);
9710 CASE_FLT_FN (BUILT_IN_ROUND):
9711 return fold_builtin_round (fndecl, arg0);
9713 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9714 CASE_FLT_FN (BUILT_IN_RINT):
9715 return fold_trunc_transparent_mathfn (fndecl, arg0);
9717 CASE_FLT_FN (BUILT_IN_LCEIL):
9718 CASE_FLT_FN (BUILT_IN_LLCEIL):
9719 CASE_FLT_FN (BUILT_IN_LFLOOR):
9720 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9721 CASE_FLT_FN (BUILT_IN_LROUND):
9722 CASE_FLT_FN (BUILT_IN_LLROUND):
9723 return fold_builtin_int_roundingfn (fndecl, arg0);
9725 CASE_FLT_FN (BUILT_IN_LRINT):
9726 CASE_FLT_FN (BUILT_IN_LLRINT):
9727 return fold_fixed_mathfn (fndecl, arg0);
9729 case BUILT_IN_BSWAP32:
9730 case BUILT_IN_BSWAP64:
9731 return fold_builtin_bswap (fndecl, arg0);
9733 CASE_INT_FN (BUILT_IN_FFS):
9734 CASE_INT_FN (BUILT_IN_CLZ):
9735 CASE_INT_FN (BUILT_IN_CTZ):
9736 CASE_INT_FN (BUILT_IN_POPCOUNT):
9737 CASE_INT_FN (BUILT_IN_PARITY):
9738 return fold_builtin_bitop (fndecl, arg0);
9740 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9741 return fold_builtin_signbit (arg0, type);
9743 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9744 return fold_builtin_significand (arg0, type);
9746 CASE_FLT_FN (BUILT_IN_ILOGB):
9747 CASE_FLT_FN (BUILT_IN_LOGB):
9748 return fold_builtin_logb (arg0, type);
9750 case BUILT_IN_ISASCII:
9751 return fold_builtin_isascii (arg0);
9753 case BUILT_IN_TOASCII:
9754 return fold_builtin_toascii (arg0);
9756 case BUILT_IN_ISDIGIT:
9757 return fold_builtin_isdigit (arg0);
9759 CASE_FLT_FN (BUILT_IN_FINITE):
9760 case BUILT_IN_FINITED32:
9761 case BUILT_IN_FINITED64:
9762 case BUILT_IN_FINITED128:
9763 return fold_builtin_classify (fndecl, arg0, BUILT_IN_FINITE);
9765 CASE_FLT_FN (BUILT_IN_ISINF):
9766 case BUILT_IN_ISINFD32:
9767 case BUILT_IN_ISINFD64:
9768 case BUILT_IN_ISINFD128:
9769 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
9771 CASE_FLT_FN (BUILT_IN_ISNAN):
9772 case BUILT_IN_ISNAND32:
9773 case BUILT_IN_ISNAND64:
9774 case BUILT_IN_ISNAND128:
9775 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
9777 case BUILT_IN_PRINTF:
9778 case BUILT_IN_PRINTF_UNLOCKED:
9779 case BUILT_IN_VPRINTF:
9780 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
9782 default:
9783 break;
9786 return NULL_TREE;
9790 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9791 IGNORE is true if the result of the function call is ignored. This
9792 function returns NULL_TREE if no simplification was possible. */
9794 static tree
9795 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
9797 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9798 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9800 switch (fcode)
9803 CASE_FLT_FN (BUILT_IN_ATAN2):
9804 if (validate_arg (arg0, REAL_TYPE)
9805 && validate_arg(arg1, REAL_TYPE))
9806 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9807 break;
9809 CASE_FLT_FN (BUILT_IN_FDIM):
9810 if (validate_arg (arg0, REAL_TYPE)
9811 && validate_arg(arg1, REAL_TYPE))
9812 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9813 break;
9815 CASE_FLT_FN (BUILT_IN_HYPOT):
9816 return fold_builtin_hypot (fndecl, arg0, arg1, type);
9818 CASE_FLT_FN (BUILT_IN_LDEXP):
9819 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
9820 CASE_FLT_FN (BUILT_IN_SCALBN):
9821 CASE_FLT_FN (BUILT_IN_SCALBLN):
9822 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
9824 CASE_FLT_FN (BUILT_IN_FREXP):
9825 return fold_builtin_frexp (arg0, arg1, type);
9827 CASE_FLT_FN (BUILT_IN_MODF):
9828 return fold_builtin_modf (arg0, arg1, type);
9830 case BUILT_IN_BZERO:
9831 return fold_builtin_bzero (arg0, arg1, ignore);
9833 case BUILT_IN_FPUTS:
9834 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
9836 case BUILT_IN_FPUTS_UNLOCKED:
9837 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
9839 case BUILT_IN_STRSTR:
9840 return fold_builtin_strstr (arg0, arg1, type);
9842 case BUILT_IN_STRCAT:
9843 return fold_builtin_strcat (arg0, arg1);
9845 case BUILT_IN_STRSPN:
9846 return fold_builtin_strspn (arg0, arg1);
9848 case BUILT_IN_STRCSPN:
9849 return fold_builtin_strcspn (arg0, arg1);
9851 case BUILT_IN_STRCHR:
9852 case BUILT_IN_INDEX:
9853 return fold_builtin_strchr (arg0, arg1, type);
9855 case BUILT_IN_STRRCHR:
9856 case BUILT_IN_RINDEX:
9857 return fold_builtin_strrchr (arg0, arg1, type);
9859 case BUILT_IN_STRCPY:
9860 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
9862 case BUILT_IN_STRCMP:
9863 return fold_builtin_strcmp (arg0, arg1);
9865 case BUILT_IN_STRPBRK:
9866 return fold_builtin_strpbrk (arg0, arg1, type);
9868 case BUILT_IN_EXPECT:
9869 return fold_builtin_expect (arg0);
9871 CASE_FLT_FN (BUILT_IN_POW):
9872 return fold_builtin_pow (fndecl, arg0, arg1, type);
9874 CASE_FLT_FN (BUILT_IN_POWI):
9875 return fold_builtin_powi (fndecl, arg0, arg1, type);
9877 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9878 return fold_builtin_copysign (fndecl, arg0, arg1, type);
9880 CASE_FLT_FN (BUILT_IN_FMIN):
9881 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
9883 CASE_FLT_FN (BUILT_IN_FMAX):
9884 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
9886 case BUILT_IN_ISGREATER:
9887 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
9888 case BUILT_IN_ISGREATEREQUAL:
9889 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
9890 case BUILT_IN_ISLESS:
9891 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
9892 case BUILT_IN_ISLESSEQUAL:
9893 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
9894 case BUILT_IN_ISLESSGREATER:
9895 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9896 case BUILT_IN_ISUNORDERED:
9897 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
9898 NOP_EXPR);
9900 /* We do the folding for va_start in the expander. */
9901 case BUILT_IN_VA_START:
9902 break;
9904 case BUILT_IN_SPRINTF:
9905 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
9907 case BUILT_IN_OBJECT_SIZE:
9908 return fold_builtin_object_size (arg0, arg1);
9910 case BUILT_IN_PRINTF:
9911 case BUILT_IN_PRINTF_UNLOCKED:
9912 case BUILT_IN_VPRINTF:
9913 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
9915 case BUILT_IN_PRINTF_CHK:
9916 case BUILT_IN_VPRINTF_CHK:
9917 if (!validate_arg (arg0, INTEGER_TYPE)
9918 || TREE_SIDE_EFFECTS (arg0))
9919 return NULL_TREE;
9920 else
9921 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
9922 break;
9924 case BUILT_IN_FPRINTF:
9925 case BUILT_IN_FPRINTF_UNLOCKED:
9926 case BUILT_IN_VFPRINTF:
9927 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
9928 ignore, fcode);
9930 default:
9931 break;
9933 return NULL_TREE;
9936 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9937 and ARG2. IGNORE is true if the result of the function call is ignored.
9938 This function returns NULL_TREE if no simplification was possible. */
9940 static tree
9941 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
9943 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9944 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9945 switch (fcode)
9948 CASE_FLT_FN (BUILT_IN_SINCOS):
9949 return fold_builtin_sincos (arg0, arg1, arg2);
9951 CASE_FLT_FN (BUILT_IN_FMA):
9952 if (validate_arg (arg0, REAL_TYPE)
9953 && validate_arg(arg1, REAL_TYPE)
9954 && validate_arg(arg2, REAL_TYPE))
9955 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9956 break;
9958 case BUILT_IN_MEMSET:
9959 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
9961 case BUILT_IN_BCOPY:
9962 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
9964 case BUILT_IN_MEMCPY:
9965 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
9967 case BUILT_IN_MEMPCPY:
9968 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
9970 case BUILT_IN_MEMMOVE:
9971 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
9973 case BUILT_IN_STRNCAT:
9974 return fold_builtin_strncat (arg0, arg1, arg2);
9976 case BUILT_IN_STRNCPY:
9977 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
9979 case BUILT_IN_STRNCMP:
9980 return fold_builtin_strncmp (arg0, arg1, arg2);
9982 case BUILT_IN_BCMP:
9983 case BUILT_IN_MEMCMP:
9984 return fold_builtin_memcmp (arg0, arg1, arg2);
9986 case BUILT_IN_SPRINTF:
9987 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
9989 case BUILT_IN_STRCPY_CHK:
9990 case BUILT_IN_STPCPY_CHK:
9991 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
9992 ignore, fcode);
9994 case BUILT_IN_STRCAT_CHK:
9995 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
9997 case BUILT_IN_PRINTF_CHK:
9998 case BUILT_IN_VPRINTF_CHK:
9999 if (!validate_arg (arg0, INTEGER_TYPE)
10000 || TREE_SIDE_EFFECTS (arg0))
10001 return NULL_TREE;
10002 else
10003 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10004 break;
10006 case BUILT_IN_FPRINTF:
10007 case BUILT_IN_FPRINTF_UNLOCKED:
10008 case BUILT_IN_VFPRINTF:
10009 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10011 case BUILT_IN_FPRINTF_CHK:
10012 case BUILT_IN_VFPRINTF_CHK:
10013 if (!validate_arg (arg1, INTEGER_TYPE)
10014 || TREE_SIDE_EFFECTS (arg1))
10015 return NULL_TREE;
10016 else
10017 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10018 ignore, fcode);
10020 default:
10021 break;
10023 return NULL_TREE;
10026 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10027 ARG2, and ARG3. IGNORE is true if the result of the function call is
10028 ignored. This function returns NULL_TREE if no simplification was
10029 possible. */
10031 static tree
10032 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10033 bool ignore)
10035 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10037 switch (fcode)
10039 case BUILT_IN_MEMCPY_CHK:
10040 case BUILT_IN_MEMPCPY_CHK:
10041 case BUILT_IN_MEMMOVE_CHK:
10042 case BUILT_IN_MEMSET_CHK:
10043 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10044 NULL_TREE, ignore,
10045 DECL_FUNCTION_CODE (fndecl));
10047 case BUILT_IN_STRNCPY_CHK:
10048 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10050 case BUILT_IN_STRNCAT_CHK:
10051 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10053 case BUILT_IN_FPRINTF_CHK:
10054 case BUILT_IN_VFPRINTF_CHK:
10055 if (!validate_arg (arg1, INTEGER_TYPE)
10056 || TREE_SIDE_EFFECTS (arg1))
10057 return NULL_TREE;
10058 else
10059 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10060 ignore, fcode);
10061 break;
10063 default:
10064 break;
10066 return NULL_TREE;
10069 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10070 arguments, where NARGS <= 4. IGNORE is true if the result of the
10071 function call is ignored. This function returns NULL_TREE if no
10072 simplification was possible. Note that this only folds builtins with
10073 fixed argument patterns. Foldings that do varargs-to-varargs
10074 transformations, or that match calls with more than 4 arguments,
10075 need to be handled with fold_builtin_varargs instead. */
10077 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10079 static tree
10080 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10082 tree ret = NULL_TREE;
10083 switch (nargs)
10085 case 0:
10086 ret = fold_builtin_0 (fndecl, ignore);
10087 break;
10088 case 1:
10089 ret = fold_builtin_1 (fndecl, args[0], ignore);
10090 break;
10091 case 2:
10092 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10093 break;
10094 case 3:
10095 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10096 break;
10097 case 4:
10098 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10099 ignore);
10100 break;
10101 default:
10102 break;
10104 if (ret)
10106 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10107 TREE_NO_WARNING (ret) = 1;
10108 return ret;
10110 return NULL_TREE;
10113 /* Builtins with folding operations that operate on "..." arguments
10114 need special handling; we need to store the arguments in a convenient
10115 data structure before attempting any folding. Fortunately there are
10116 only a few builtins that fall into this category. FNDECL is the
10117 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10118 result of the function call is ignored. */
10120 static tree
10121 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10123 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10124 tree ret = NULL_TREE;
10126 switch (fcode)
10128 case BUILT_IN_SPRINTF_CHK:
10129 case BUILT_IN_VSPRINTF_CHK:
10130 ret = fold_builtin_sprintf_chk (exp, fcode);
10131 break;
10133 case BUILT_IN_SNPRINTF_CHK:
10134 case BUILT_IN_VSNPRINTF_CHK:
10135 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10137 default:
10138 break;
10140 if (ret)
10142 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10143 TREE_NO_WARNING (ret) = 1;
10144 return ret;
10146 return NULL_TREE;
10149 /* A wrapper function for builtin folding that prevents warnings for
10150 "statement without effect" and the like, caused by removing the
10151 call node earlier than the warning is generated. */
10153 tree
10154 fold_call_expr (tree exp, bool ignore)
10156 tree ret = NULL_TREE;
10157 tree fndecl = get_callee_fndecl (exp);
10158 if (fndecl
10159 && TREE_CODE (fndecl) == FUNCTION_DECL
10160 && DECL_BUILT_IN (fndecl))
10162 /* FIXME: Don't use a list in this interface. */
10163 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10164 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10165 else
10167 int nargs = call_expr_nargs (exp);
10168 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10170 tree *args = CALL_EXPR_ARGP (exp);
10171 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10173 if (!ret)
10174 ret = fold_builtin_varargs (fndecl, exp, ignore);
10175 if (ret)
10177 /* Propagate location information from original call to
10178 expansion of builtin. Otherwise things like
10179 maybe_emit_chk_warning, that operate on the expansion
10180 of a builtin, will use the wrong location information. */
10181 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10183 tree realret = ret;
10184 if (TREE_CODE (ret) == NOP_EXPR)
10185 realret = TREE_OPERAND (ret, 0);
10186 if (CAN_HAVE_LOCATION_P (realret)
10187 && !EXPR_HAS_LOCATION (realret))
10188 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10190 return ret;
10194 return NULL_TREE;
10197 /* Conveniently construct a function call expression. FNDECL names the
10198 function to be called and ARGLIST is a TREE_LIST of arguments. */
10200 tree
10201 build_function_call_expr (tree fndecl, tree arglist)
10203 tree fntype = TREE_TYPE (fndecl);
10204 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10205 int n = list_length (arglist);
10206 tree *argarray = (tree *) alloca (n * sizeof (tree));
10207 int i;
10209 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10210 argarray[i] = TREE_VALUE (arglist);
10211 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10214 /* Conveniently construct a function call expression. FNDECL names the
10215 function to be called, N is the number of arguments, and the "..."
10216 parameters are the argument expressions. */
10218 tree
10219 build_call_expr (tree fndecl, int n, ...)
10221 va_list ap;
10222 tree fntype = TREE_TYPE (fndecl);
10223 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10224 tree *argarray = (tree *) alloca (n * sizeof (tree));
10225 int i;
10227 va_start (ap, n);
10228 for (i = 0; i < n; i++)
10229 argarray[i] = va_arg (ap, tree);
10230 va_end (ap);
10231 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10234 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10235 N arguments are passed in the array ARGARRAY. */
10237 tree
10238 fold_builtin_call_array (tree type,
10239 tree fn,
10240 int n,
10241 tree *argarray)
10243 tree ret = NULL_TREE;
10244 int i;
10245 tree exp;
10247 if (TREE_CODE (fn) == ADDR_EXPR)
10249 tree fndecl = TREE_OPERAND (fn, 0);
10250 if (TREE_CODE (fndecl) == FUNCTION_DECL
10251 && DECL_BUILT_IN (fndecl))
10253 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10255 tree arglist = NULL_TREE;
10256 for (i = n - 1; i >= 0; i--)
10257 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10258 ret = targetm.fold_builtin (fndecl, arglist, false);
10259 if (ret)
10260 return ret;
10262 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10264 /* First try the transformations that don't require consing up
10265 an exp. */
10266 ret = fold_builtin_n (fndecl, argarray, n, false);
10267 if (ret)
10268 return ret;
10271 /* If we got this far, we need to build an exp. */
10272 exp = build_call_array (type, fn, n, argarray);
10273 ret = fold_builtin_varargs (fndecl, exp, false);
10274 return ret ? ret : exp;
10278 return build_call_array (type, fn, n, argarray);
10281 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10282 along with N new arguments specified as the "..." parameters. SKIP
10283 is the number of arguments in EXP to be omitted. This function is used
10284 to do varargs-to-varargs transformations. */
10286 static tree
10287 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10289 int oldnargs = call_expr_nargs (exp);
10290 int nargs = oldnargs - skip + n;
10291 tree fntype = TREE_TYPE (fndecl);
10292 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10293 tree *buffer;
10295 if (n > 0)
10297 int i, j;
10298 va_list ap;
10300 buffer = alloca (nargs * sizeof (tree));
10301 va_start (ap, n);
10302 for (i = 0; i < n; i++)
10303 buffer[i] = va_arg (ap, tree);
10304 va_end (ap);
10305 for (j = skip; j < oldnargs; j++, i++)
10306 buffer[i] = CALL_EXPR_ARG (exp, j);
10308 else
10309 buffer = CALL_EXPR_ARGP (exp) + skip;
10311 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10314 /* Validate a single argument ARG against a tree code CODE representing
10315 a type. */
10317 static bool
10318 validate_arg (tree arg, enum tree_code code)
10320 if (!arg)
10321 return false;
10322 else if (code == POINTER_TYPE)
10323 return POINTER_TYPE_P (TREE_TYPE (arg));
10324 return code == TREE_CODE (TREE_TYPE (arg));
10327 /* This function validates the types of a function call argument list
10328 against a specified list of tree_codes. If the last specifier is a 0,
10329 that represents an ellipsis; otherwise the last specifier must be a
10330 VOID_TYPE. */
10332 bool
10333 validate_arglist (tree callexpr, ...)
10335 enum tree_code code;
10336 bool res = 0;
10337 va_list ap;
10338 call_expr_arg_iterator iter;
10339 tree arg;
10341 va_start (ap, callexpr);
10342 init_call_expr_arg_iterator (callexpr, &iter);
10346 code = va_arg (ap, enum tree_code);
10347 switch (code)
10349 case 0:
10350 /* This signifies an ellipsis; any further arguments are all OK. */
10351 res = true;
10352 goto end;
10353 case VOID_TYPE:
10354 /* This signifies an endlink, if no arguments remain, return
10355 true, otherwise return false. */
10356 res = !more_call_expr_args_p (&iter);
10357 goto end;
10358 default:
10359 /* If no parameters remain or the parameter's code does not
10360 match the specified code, return false. Otherwise continue
10361 checking any remaining arguments. */
10362 arg = next_call_expr_arg (&iter);
10363 if (!validate_arg (arg, code))
10364 goto end;
10365 break;
10368 while (1);
10370 /* We need gotos here since we can only have one VA_CLOSE in a
10371 function. */
10372 end: ;
10373 va_end (ap);
10375 return res;
10378 /* Default target-specific builtin expander that does nothing. */
10381 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10382 rtx target ATTRIBUTE_UNUSED,
10383 rtx subtarget ATTRIBUTE_UNUSED,
10384 enum machine_mode mode ATTRIBUTE_UNUSED,
10385 int ignore ATTRIBUTE_UNUSED)
10387 return NULL_RTX;
10390 /* Returns true if EXP represents data that would potentially reside
10391 in a readonly section. */
10393 static bool
10394 readonly_data_expr (tree exp)
10396 STRIP_NOPS (exp);
10398 if (TREE_CODE (exp) != ADDR_EXPR)
10399 return false;
10401 exp = get_base_address (TREE_OPERAND (exp, 0));
10402 if (!exp)
10403 return false;
10405 /* Make sure we call decl_readonly_section only for trees it
10406 can handle (since it returns true for everything it doesn't
10407 understand). */
10408 if (TREE_CODE (exp) == STRING_CST
10409 || TREE_CODE (exp) == CONSTRUCTOR
10410 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10411 return decl_readonly_section (exp, 0);
10412 else
10413 return false;
10416 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10417 to the call, and TYPE is its return type.
10419 Return NULL_TREE if no simplification was possible, otherwise return the
10420 simplified form of the call as a tree.
10422 The simplified form may be a constant or other expression which
10423 computes the same value, but in a more efficient manner (including
10424 calls to other builtin functions).
10426 The call may contain arguments which need to be evaluated, but
10427 which are not useful to determine the result of the call. In
10428 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10429 COMPOUND_EXPR will be an argument which must be evaluated.
10430 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10431 COMPOUND_EXPR in the chain will contain the tree for the simplified
10432 form of the builtin function call. */
10434 static tree
10435 fold_builtin_strstr (tree s1, tree s2, tree type)
10437 if (!validate_arg (s1, POINTER_TYPE)
10438 || !validate_arg (s2, POINTER_TYPE))
10439 return NULL_TREE;
10440 else
10442 tree fn;
10443 const char *p1, *p2;
10445 p2 = c_getstr (s2);
10446 if (p2 == NULL)
10447 return NULL_TREE;
10449 p1 = c_getstr (s1);
10450 if (p1 != NULL)
10452 const char *r = strstr (p1, p2);
10453 tree tem;
10455 if (r == NULL)
10456 return build_int_cst (TREE_TYPE (s1), 0);
10458 /* Return an offset into the constant string argument. */
10459 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10460 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10461 return fold_convert (type, tem);
10464 /* The argument is const char *, and the result is char *, so we need
10465 a type conversion here to avoid a warning. */
10466 if (p2[0] == '\0')
10467 return fold_convert (type, s1);
10469 if (p2[1] != '\0')
10470 return NULL_TREE;
10472 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10473 if (!fn)
10474 return NULL_TREE;
10476 /* New argument list transforming strstr(s1, s2) to
10477 strchr(s1, s2[0]). */
10478 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
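
A minimal sketch (not part of builtins.c) of the strstr simplifications above; the exact transformation chosen depends on which arguments are string literals.

#include <stdio.h>
#include <string.h>

int main (void)
{
  const char *s = "hello";
  /* Both arguments literal: folded to an offset, "hello" + 2.  */
  printf ("%s\n", strstr ("hello", "ll"));   /* llo */
  /* One-character needle: folded to strchr (s, 'l').  */
  printf ("%s\n", strstr (s, "l"));          /* llo */
  /* Empty needle: folded to the haystack pointer itself.  */
  printf ("%s\n", strstr (s, ""));           /* hello */
  return 0;
}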
10482 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10483 the call, and TYPE is its return type.
10485 Return NULL_TREE if no simplification was possible, otherwise return the
10486 simplified form of the call as a tree.
10488 The simplified form may be a constant or other expression which
10489 computes the same value, but in a more efficient manner (including
10490 calls to other builtin functions).
10492 The call may contain arguments which need to be evaluated, but
10493 which are not useful to determine the result of the call. In
10494 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10495 COMPOUND_EXPR will be an argument which must be evaluated.
10496 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10497 COMPOUND_EXPR in the chain will contain the tree for the simplified
10498 form of the builtin function call. */
10500 static tree
10501 fold_builtin_strchr (tree s1, tree s2, tree type)
10503 if (!validate_arg (s1, POINTER_TYPE)
10504 || !validate_arg (s2, INTEGER_TYPE))
10505 return NULL_TREE;
10506 else
10508 const char *p1;
10510 if (TREE_CODE (s2) != INTEGER_CST)
10511 return NULL_TREE;
10513 p1 = c_getstr (s1);
10514 if (p1 != NULL)
10516 char c;
10517 const char *r;
10518 tree tem;
10520 if (target_char_cast (s2, &c))
10521 return NULL_TREE;
10523 r = strchr (p1, c);
10525 if (r == NULL)
10526 return build_int_cst (TREE_TYPE (s1), 0);
10528 /* Return an offset into the constant string argument. */
10529 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10530 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10531 return fold_convert (type, tem);
10533 return NULL_TREE;
10537 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10538 the call, and TYPE is its return type.
10540 Return NULL_TREE if no simplification was possible, otherwise return the
10541 simplified form of the call as a tree.
10543 The simplified form may be a constant or other expression which
10544 computes the same value, but in a more efficient manner (including
10545 calls to other builtin functions).
10547 The call may contain arguments which need to be evaluated, but
10548 which are not useful to determine the result of the call. In
10549 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10550 COMPOUND_EXPR will be an argument which must be evaluated.
10551 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10552 COMPOUND_EXPR in the chain will contain the tree for the simplified
10553 form of the builtin function call. */
10555 static tree
10556 fold_builtin_strrchr (tree s1, tree s2, tree type)
10558 if (!validate_arg (s1, POINTER_TYPE)
10559 || !validate_arg (s2, INTEGER_TYPE))
10560 return NULL_TREE;
10561 else
10563 tree fn;
10564 const char *p1;
10566 if (TREE_CODE (s2) != INTEGER_CST)
10567 return NULL_TREE;
10569 p1 = c_getstr (s1);
10570 if (p1 != NULL)
10572 char c;
10573 const char *r;
10574 tree tem;
10576 if (target_char_cast (s2, &c))
10577 return NULL_TREE;
10579 r = strrchr (p1, c);
10581 if (r == NULL)
10582 return build_int_cst (TREE_TYPE (s1), 0);
10584 /* Return an offset into the constant string argument. */
10585 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10586 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10587 return fold_convert (type, tem);
10590 if (! integer_zerop (s2))
10591 return NULL_TREE;
10593 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10594 if (!fn)
10595 return NULL_TREE;
10597 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10598 return build_call_expr (fn, 2, s1, s2);
10602 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10603 to the call, and TYPE is its return type.
10605 Return NULL_TREE if no simplification was possible, otherwise return the
10606 simplified form of the call as a tree.
10608 The simplified form may be a constant or other expression which
10609 computes the same value, but in a more efficient manner (including
10610 calls to other builtin functions).
10612 The call may contain arguments which need to be evaluated, but
10613 which are not useful to determine the result of the call. In
10614 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10615 COMPOUND_EXPR will be an argument which must be evaluated.
10616 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10617 COMPOUND_EXPR in the chain will contain the tree for the simplified
10618 form of the builtin function call. */
10620 static tree
10621 fold_builtin_strpbrk (tree s1, tree s2, tree type)
10623 if (!validate_arg (s1, POINTER_TYPE)
10624 || !validate_arg (s2, POINTER_TYPE))
10625 return NULL_TREE;
10626 else
10628 tree fn;
10629 const char *p1, *p2;
10631 p2 = c_getstr (s2);
10632 if (p2 == NULL)
10633 return NULL_TREE;
10635 p1 = c_getstr (s1);
10636 if (p1 != NULL)
10638 const char *r = strpbrk (p1, p2);
10639 tree tem;
10641 if (r == NULL)
10642 return build_int_cst (TREE_TYPE (s1), 0);
10644 /* Return an offset into the constant string argument. */
10645 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10646 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10647 return fold_convert (type, tem);
10650 if (p2[0] == '\0')
10651 /* strpbrk(x, "") == NULL.
10652 Evaluate and ignore s1 in case it had side-effects. */
10653 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
10655 if (p2[1] != '\0')
10656 return NULL_TREE; /* Really call strpbrk. */
10658 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10659 if (!fn)
10660 return NULL_TREE;
10662 /* New argument list transforming strpbrk(s1, s2) to
10663 strchr(s1, s2[0]). */
10664 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10668 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10669 to the call.
10671 Return NULL_TREE if no simplification was possible, otherwise return the
10672 simplified form of the call as a tree.
10674 The simplified form may be a constant or other expression which
10675 computes the same value, but in a more efficient manner (including
10676 calls to other builtin functions).
10678 The call may contain arguments which need to be evaluated, but
10679 which are not useful to determine the result of the call. In
10680 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10681 COMPOUND_EXPR will be an argument which must be evaluated.
10682 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10683 COMPOUND_EXPR in the chain will contain the tree for the simplified
10684 form of the builtin function call. */
10686 static tree
10687 fold_builtin_strcat (tree dst, tree src)
10689 if (!validate_arg (dst, POINTER_TYPE)
10690 || !validate_arg (src, POINTER_TYPE))
10691 return NULL_TREE;
10692 else
10694 const char *p = c_getstr (src);
10696 /* If the string length is zero, return the dst parameter. */
10697 if (p && *p == '\0')
10698 return dst;
10700 return NULL_TREE;
10704 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10705 arguments to the call.
10707 Return NULL_TREE if no simplification was possible, otherwise return the
10708 simplified form of the call as a tree.
10710 The simplified form may be a constant or other expression which
10711 computes the same value, but in a more efficient manner (including
10712 calls to other builtin functions).
10714 The call may contain arguments which need to be evaluated, but
10715 which are not useful to determine the result of the call. In
10716 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10717 COMPOUND_EXPR will be an argument which must be evaluated.
10718 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10719 COMPOUND_EXPR in the chain will contain the tree for the simplified
10720 form of the builtin function call. */
10722 static tree
10723 fold_builtin_strncat (tree dst, tree src, tree len)
10725 if (!validate_arg (dst, POINTER_TYPE)
10726 || !validate_arg (src, POINTER_TYPE)
10727 || !validate_arg (len, INTEGER_TYPE))
10728 return NULL_TREE;
10729 else
10731 const char *p = c_getstr (src);
10733 /* If the requested length is zero, or the src parameter string
10734 length is zero, return the dst parameter. */
10735 if (integer_zerop (len) || (p && *p == '\0'))
10736 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
10738 /* If the requested len is greater than or equal to the string
10739 length, call strcat. */
10740 if (TREE_CODE (len) == INTEGER_CST && p
10741 && compare_tree_int (len, strlen (p)) >= 0)
10743 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
10745 /* If the replacement _DECL isn't initialized, don't do the
10746 transformation. */
10747 if (!fn)
10748 return NULL_TREE;
10750 return build_call_expr (fn, 2, dst, src);
10752 return NULL_TREE;
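
A small sketch (not part of builtins.c) of the strcat/strncat folds above: an empty source or a zero count leaves DST untouched, and a count at least as large as strlen (src) becomes a plain strcat.

#include <stdio.h>
#include <string.h>

int main (void)
{
  char d[32] = "foo";
  /* Empty source string: folded to just d.  */
  strcat (d, "");
  /* Count >= strlen ("bar"): folded to strcat (d, "bar").  */
  strncat (d, "bar", 8);
  /* Zero count: the call is folded away.  */
  strncat (d, "baz", 0);
  printf ("%s\n", d);   /* foobar */
  return 0;
}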
10756 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10757 to the call.
10759 Return NULL_TREE if no simplification was possible, otherwise return the
10760 simplified form of the call as a tree.
10762 The simplified form may be a constant or other expression which
10763 computes the same value, but in a more efficient manner (including
10764 calls to other builtin functions).
10766 The call may contain arguments which need to be evaluated, but
10767 which are not useful to determine the result of the call. In
10768 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10769 COMPOUND_EXPR will be an argument which must be evaluated.
10770 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10771 COMPOUND_EXPR in the chain will contain the tree for the simplified
10772 form of the builtin function call. */
10774 static tree
10775 fold_builtin_strspn (tree s1, tree s2)
10777 if (!validate_arg (s1, POINTER_TYPE)
10778 || !validate_arg (s2, POINTER_TYPE))
10779 return NULL_TREE;
10780 else
10782 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10784 /* If both arguments are constants, evaluate at compile-time. */
10785 if (p1 && p2)
10787 const size_t r = strspn (p1, p2);
10788 return size_int (r);
10791 /* If either argument is "", the result is 0. */
10792 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10793 /* Evaluate and ignore both arguments in case either one has
10794 side-effects. */
10795 return omit_two_operands (integer_type_node, integer_zero_node,
10796 s1, s2);
10797 return NULL_TREE;
10801 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10802 to the call.
10804 Return NULL_TREE if no simplification was possible, otherwise return the
10805 simplified form of the call as a tree.
10807 The simplified form may be a constant or other expression which
10808 computes the same value, but in a more efficient manner (including
10809 calls to other builtin functions).
10811 The call may contain arguments which need to be evaluated, but
10812 which are not useful to determine the result of the call. In
10813 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10814 COMPOUND_EXPR will be an argument which must be evaluated.
10815 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10816 COMPOUND_EXPR in the chain will contain the tree for the simplified
10817 form of the builtin function call. */
10819 static tree
10820 fold_builtin_strcspn (tree s1, tree s2)
10822 if (!validate_arg (s1, POINTER_TYPE)
10823 || !validate_arg (s2, POINTER_TYPE))
10824 return NULL_TREE;
10825 else
10827 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10829 /* If both arguments are constants, evaluate at compile-time. */
10830 if (p1 && p2)
10832 const size_t r = strcspn (p1, p2);
10833 return size_int (r);
10836 /* If the first argument is "", the result is 0. */
10837 if (p1 && *p1 == '\0')
10839 /* Evaluate and ignore argument s2 in case it has
10840 side-effects. */
10841 return omit_one_operand (integer_type_node,
10842 integer_zero_node, s2);
10845 /* If the second argument is "", return __builtin_strlen(s1). */
10846 if (p2 && *p2 == '\0')
10848 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
10850 /* If the replacement _DECL isn't initialized, don't do the
10851 transformation. */
10852 if (!fn)
10853 return NULL_TREE;
10855 return build_call_expr (fn, 1, s1);
10857 return NULL_TREE;
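
A minimal sketch (not part of builtins.c) of the strspn/strcspn folds above: two literal arguments are evaluated at compile time, and an empty set argument collapses to 0 or to strlen.

#include <stdio.h>
#include <string.h>

int main (void)
{
  const char *s = "hello world";
  /* Both arguments literal: evaluated at compile time.  */
  printf ("%zu\n", strspn ("aabbcc", "ab"));   /* 4 */
  /* Empty accept set: strspn (s, "") is 0.  */
  printf ("%zu\n", strspn (s, ""));            /* 0 */
  /* Empty reject set: strcspn (s, "") is folded to strlen (s).  */
  printf ("%zu\n", strcspn (s, ""));           /* 11 */
  return 0;
}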
10861 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
10862 to the call. IGNORE is true if the value returned
10863 by the builtin will be ignored. UNLOCKED is true if this is
10864 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
10865 the known length of the string. Return NULL_TREE if no simplification
10866 was possible. */
10868 tree
10869 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
10871 /* If we're using an unlocked function, assume the other unlocked
10872 functions exist explicitly. */
10873 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
10874 : implicit_built_in_decls[BUILT_IN_FPUTC];
10875 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
10876 : implicit_built_in_decls[BUILT_IN_FWRITE];
10878 /* If the return value is used, don't do the transformation. */
10879 if (!ignore)
10880 return NULL_TREE;
10882 /* Verify the arguments in the original call. */
10883 if (!validate_arg (arg0, POINTER_TYPE)
10884 || !validate_arg (arg1, POINTER_TYPE))
10885 return NULL_TREE;
10887 if (! len)
10888 len = c_strlen (arg0, 0);
10890 /* Get the length of the string passed to fputs. If the length
10891 can't be determined, punt. */
10892 if (!len
10893 || TREE_CODE (len) != INTEGER_CST)
10894 return NULL_TREE;
10896 switch (compare_tree_int (len, 1))
10898 case -1: /* Length is 0, delete the call entirely. */
10899 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
10901 case 0: /* length is 1, call fputc. */
10903 const char *p = c_getstr (arg0);
10905 if (p != NULL)
10907 if (fn_fputc)
10908 return build_call_expr (fn_fputc, 2,
10909 build_int_cst (NULL_TREE, p[0]), arg1);
10910 else
10911 return NULL_TREE;
10914 /* FALLTHROUGH */
10915 case 1: /* length is greater than 1, call fwrite. */
10917 /* If optimizing for size keep fputs. */
10918 if (optimize_size)
10919 return NULL_TREE;
10920 /* New argument list transforming fputs(string, stream) to
10921 fwrite(string, 1, len, stream). */
10922 if (fn_fwrite)
10923 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
10924 else
10925 return NULL_TREE;
10927 default:
10928 gcc_unreachable ();
10930 return NULL_TREE;
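
A small sketch (not part of builtins.c) of the fputs transformation above; it assumes the return value is unused and that the compilation is not optimizing for size.

#include <stdio.h>

int main (void)
{
  /* Empty string: the call is removed (the stream is still evaluated).  */
  fputs ("", stdout);
  /* Single character: folded to fputc ('x', stdout).  */
  fputs ("x", stdout);
  /* Longer known length: folded to fwrite ("abc\n", 1, 4, stdout).  */
  fputs ("abc\n", stdout);
  return 0;
}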
10933 /* Fold the next_arg or va_start call EXP. Returns true if an error
10934 was produced, false otherwise. This is done so that we don't output
10935 the error or warning twice or three times. */
10936 bool
10937 fold_builtin_next_arg (tree exp, bool va_start_p)
10939 tree fntype = TREE_TYPE (current_function_decl);
10940 int nargs = call_expr_nargs (exp);
10941 tree arg;
10943 if (TYPE_ARG_TYPES (fntype) == 0
10944 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
10945 == void_type_node))
10947 error ("%<va_start%> used in function with fixed args");
10948 return true;
10951 if (va_start_p)
10953 if (va_start_p && (nargs != 2))
10955 error ("wrong number of arguments to function %<va_start%>");
10956 return true;
10958 arg = CALL_EXPR_ARG (exp, 1);
10960 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10961 once the arguments have been checked and, if needed, a warning issued. */
10962 else
10964 if (nargs == 0)
10966 /* Evidently an out of date version of <stdarg.h>; can't validate
10967 va_start's second argument, but can still work as intended. */
10968 warning (0, "%<__builtin_next_arg%> called without an argument");
10969 return true;
10971 else if (nargs > 1)
10973 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10974 return true;
10976 arg = CALL_EXPR_ARG (exp, 0);
10979 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10980 or __builtin_next_arg (0) the first time we see it, after checking
10981 the arguments and if needed issuing a warning. */
10982 if (!integer_zerop (arg))
10984 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10986 /* Strip off all nops for the sake of the comparison. This
10987 is not quite the same as STRIP_NOPS. It does more.
10988 We must also strip off INDIRECT_REF for C++ reference
10989 parameters. */
10990 while (TREE_CODE (arg) == NOP_EXPR
10991 || TREE_CODE (arg) == CONVERT_EXPR
10992 || TREE_CODE (arg) == NON_LVALUE_EXPR
10993 || TREE_CODE (arg) == INDIRECT_REF)
10994 arg = TREE_OPERAND (arg, 0);
10995 if (arg != last_parm)
10997 /* FIXME: Sometimes with the tree optimizers we can end up with
10998 something other than the last argument even though the user used
10999 the last argument. We just warn and set the arg to be the last
11000 argument so that we will get wrong-code because of
11001 it. */
11002 warning (0, "second parameter of %<va_start%> not last named argument");
11004 /* We want to verify the second parameter just once before the tree
11005 optimizers are run and then avoid keeping it in the tree,
11006 as otherwise we could warn even for correct code like:
11007 void foo (int i, ...)
11008 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11009 if (va_start_p)
11010 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11011 else
11012 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11014 return false;
11018 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11019 ORIG may be null if this is a 2-argument call. We don't attempt to
11020 simplify calls with more than 3 arguments.
11022 Return NULL_TREE if no simplification was possible, otherwise return the
11023 simplified form of the call as a tree. If IGNORED is true, it means that
11024 the caller does not use the returned value of the function. */
11026 static tree
11027 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11029 tree call, retval;
11030 const char *fmt_str = NULL;
11032 /* Verify the required arguments in the original call. We deal with two
11033 types of sprintf() calls: 'sprintf (str, fmt)' and
11034 'sprintf (dest, "%s", orig)'. */
11035 if (!validate_arg (dest, POINTER_TYPE)
11036 || !validate_arg (fmt, POINTER_TYPE))
11037 return NULL_TREE;
11038 if (orig && !validate_arg (orig, POINTER_TYPE))
11039 return NULL_TREE;
11041 /* Check whether the format is a literal string constant. */
11042 fmt_str = c_getstr (fmt);
11043 if (fmt_str == NULL)
11044 return NULL_TREE;
11046 call = NULL_TREE;
11047 retval = NULL_TREE;
11049 if (!init_target_chars ())
11050 return NULL_TREE;
11052 /* If the format doesn't contain % args or %%, use strcpy. */
11053 if (strchr (fmt_str, target_percent) == NULL)
11055 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11057 if (!fn)
11058 return NULL_TREE;
11060 /* Don't optimize sprintf (buf, "abc", ptr++). */
11061 if (orig)
11062 return NULL_TREE;
11064 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11065 'format' is known to contain no % formats. */
11066 call = build_call_expr (fn, 2, dest, fmt);
11067 if (!ignored)
11068 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11071 /* If the format is "%s", use strcpy if the result isn't used. */
11072 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11074 tree fn;
11075 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11077 if (!fn)
11078 return NULL_TREE;
11080 /* Don't crash on sprintf (str1, "%s"). */
11081 if (!orig)
11082 return NULL_TREE;
11084 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11085 if (!ignored)
11087 retval = c_strlen (orig, 1);
11088 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11089 return NULL_TREE;
11091 call = build_call_expr (fn, 2, dest, orig);
11094 if (call && retval)
11096 retval = fold_convert
11097 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11098 retval);
11099 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11101 else
11102 return call;
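/* Roughly, and only as an illustration of the fold above (BUF is assumed
   to be a large enough buffer):

       sprintf (buf, "hello");      becomes  strcpy (buf, "hello");
                                             result folded to 5 if it is used
       sprintf (buf, "%s", src);    becomes  strcpy (buf, src);
                                             result folded only when
                                             strlen (src) is a known constant

   Calls such as sprintf (buf, "abc", ptr++), or formats containing any
   other % directive, are left for the library to handle.  */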
11105 /* Expand a call EXP to __builtin_object_size. */
11107 rtx
11108 expand_builtin_object_size (tree exp)
11110 tree ost;
11111 int object_size_type;
11112 tree fndecl = get_callee_fndecl (exp);
11113 location_t locus = EXPR_LOCATION (exp);
11115 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11117 error ("%Hfirst argument of %D must be a pointer, second integer constant",
11118 &locus, fndecl);
11119 expand_builtin_trap ();
11120 return const0_rtx;
11123 ost = CALL_EXPR_ARG (exp, 1);
11124 STRIP_NOPS (ost);
11126 if (TREE_CODE (ost) != INTEGER_CST
11127 || tree_int_cst_sgn (ost) < 0
11128 || compare_tree_int (ost, 3) > 0)
11130 error ("%Hlast argument of %D is not integer constant between 0 and 3",
11131 &locus, fndecl);
11132 expand_builtin_trap ();
11133 return const0_rtx;
11136 object_size_type = tree_low_cst (ost, 0);
11138 return object_size_type < 2 ? constm1_rtx : const0_rtx;
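/* For illustration, the values produced by the expansion above and by the
   object-size machinery behave like this (BUF and P are placeholders of the
   example, not names used elsewhere in this file):

       char buf[64];
       extern char *p;                                  target object unknown

       size_t a = __builtin_object_size (buf, 0);       64
       size_t b = __builtin_object_size (buf + 16, 0);  48
       size_t c = __builtin_object_size (p, 0);         (size_t) -1
       size_t d = __builtin_object_size (p, 2);         0

   Types 0 and 1 default to (size_t) -1 when nothing is known, types 2 and 3
   default to 0.  */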
11141 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11142 FCODE is the BUILT_IN_* to use.
11143 Return NULL_RTX if we failed; the caller should emit a normal call,
11144 otherwise try to get the result in TARGET, if convenient (and in
11145 mode MODE if that's convenient). */
11147 static rtx
11148 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11149 enum built_in_function fcode)
11151 tree dest, src, len, size;
11153 if (!validate_arglist (exp,
11154 POINTER_TYPE,
11155 fcode == BUILT_IN_MEMSET_CHK
11156 ? INTEGER_TYPE : POINTER_TYPE,
11157 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11158 return NULL_RTX;
11160 dest = CALL_EXPR_ARG (exp, 0);
11161 src = CALL_EXPR_ARG (exp, 1);
11162 len = CALL_EXPR_ARG (exp, 2);
11163 size = CALL_EXPR_ARG (exp, 3);
11165 if (! host_integerp (size, 1))
11166 return NULL_RTX;
11168 if (host_integerp (len, 1) || integer_all_onesp (size))
11170 tree fn;
11172 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11174 location_t locus = EXPR_LOCATION (exp);
11175 warning (0, "%Hcall to %D will always overflow destination buffer",
11176 &locus, get_callee_fndecl (exp));
11177 return NULL_RTX;
11180 fn = NULL_TREE;
11181 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11182 mem{cpy,pcpy,move,set} is available. */
11183 switch (fcode)
11185 case BUILT_IN_MEMCPY_CHK:
11186 fn = built_in_decls[BUILT_IN_MEMCPY];
11187 break;
11188 case BUILT_IN_MEMPCPY_CHK:
11189 fn = built_in_decls[BUILT_IN_MEMPCPY];
11190 break;
11191 case BUILT_IN_MEMMOVE_CHK:
11192 fn = built_in_decls[BUILT_IN_MEMMOVE];
11193 break;
11194 case BUILT_IN_MEMSET_CHK:
11195 fn = built_in_decls[BUILT_IN_MEMSET];
11196 break;
11197 default:
11198 break;
11201 if (! fn)
11202 return NULL_RTX;
11204 fn = build_call_expr (fn, 3, dest, src, len);
11205 if (TREE_CODE (fn) == CALL_EXPR)
11206 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11207 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11209 else if (fcode == BUILT_IN_MEMSET_CHK)
11210 return NULL_RTX;
11211 else
11213 unsigned int dest_align
11214 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11216 /* If DEST is not a pointer type, call the normal function. */
11217 if (dest_align == 0)
11218 return NULL_RTX;
11220 /* If SRC and DEST are the same (and not volatile), do nothing. */
11221 if (operand_equal_p (src, dest, 0))
11223 tree expr;
11225 if (fcode != BUILT_IN_MEMPCPY_CHK)
11227 /* Evaluate and ignore LEN in case it has side-effects. */
11228 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11229 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11232 len = fold_convert (TREE_TYPE (dest), len);
11233 expr = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
11234 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11237 /* __memmove_chk special case. */
11238 if (fcode == BUILT_IN_MEMMOVE_CHK)
11240 unsigned int src_align
11241 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11243 if (src_align == 0)
11244 return NULL_RTX;
11246 /* If src is categorized for a readonly section we can use
11247 normal __memcpy_chk. */
11248 if (readonly_data_expr (src))
11250 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11251 if (!fn)
11252 return NULL_RTX;
11253 fn = build_call_expr (fn, 4, dest, src, len, size);
11254 if (TREE_CODE (fn) == CALL_EXPR)
11255 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11256 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11259 return NULL_RTX;
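/* A sketch of how this expander is typically exercised: fortified glibc-style
   headers (with _FORTIFY_SOURCE) rewrite memcpy (d, s, n) into
   __builtin___memcpy_chk (d, s, n, __builtin_object_size (d, 0)), and the
   code above then handles cases such as:

       char buf[16];
       __builtin___memcpy_chk (buf, src, 8, 16);    expands as plain memcpy,
                                                    since 8 <= 16
       __builtin___memcpy_chk (buf, src, 32, 16);   warns "will always
                                                    overflow" and keeps the
                                                    checking library call
       __builtin___memcpy_chk (buf, src, n, 16);    N unknown: the caller
                                                    emits the normal
                                                    __memcpy_chk call  */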
11263 /* Emit warning if a buffer overflow is detected at compile time. */
11265 static void
11266 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11268 int is_strlen = 0;
11269 tree len, size;
11270 location_t locus;
11272 switch (fcode)
11274 case BUILT_IN_STRCPY_CHK:
11275 case BUILT_IN_STPCPY_CHK:
11276 /* For __strcat_chk the warning will be emitted only if overflowing
11277 by at least strlen (dest) + 1 bytes. */
11278 case BUILT_IN_STRCAT_CHK:
11279 len = CALL_EXPR_ARG (exp, 1);
11280 size = CALL_EXPR_ARG (exp, 2);
11281 is_strlen = 1;
11282 break;
11283 case BUILT_IN_STRNCAT_CHK:
11284 case BUILT_IN_STRNCPY_CHK:
11285 len = CALL_EXPR_ARG (exp, 2);
11286 size = CALL_EXPR_ARG (exp, 3);
11287 break;
11288 case BUILT_IN_SNPRINTF_CHK:
11289 case BUILT_IN_VSNPRINTF_CHK:
11290 len = CALL_EXPR_ARG (exp, 1);
11291 size = CALL_EXPR_ARG (exp, 3);
11292 break;
11293 default:
11294 gcc_unreachable ();
11297 if (!len || !size)
11298 return;
11300 if (! host_integerp (size, 1) || integer_all_onesp (size))
11301 return;
11303 if (is_strlen)
11305 len = c_strlen (len, 1);
11306 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11307 return;
11309 else if (fcode == BUILT_IN_STRNCAT_CHK)
11311 tree src = CALL_EXPR_ARG (exp, 1);
11312 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11313 return;
11314 src = c_strlen (src, 1);
11315 if (! src || ! host_integerp (src, 1))
11317 locus = EXPR_LOCATION (exp);
11318 warning (0, "%Hcall to %D might overflow destination buffer",
11319 &locus, get_callee_fndecl (exp));
11320 return;
11322 else if (tree_int_cst_lt (src, size))
11323 return;
11325 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11326 return;
11328 locus = EXPR_LOCATION (exp);
11329 warning (0, "%Hcall to %D will always overflow destination buffer",
11330 &locus, get_callee_fndecl (exp));
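/* Two illustrative calls that reach the warning above:

       char buf[4];
       __builtin___strcpy_chk (buf, "overflow", 4);    needs 9 bytes > 4
       __builtin___strncpy_chk (buf, src, 8, 4);       copies 8 bytes > 4

   Both produce "call to ... will always overflow destination buffer"
   when they are expanded as library calls.  */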
11333 /* Emit warning if a buffer overflow is detected at compile time
11334 in __sprintf_chk/__vsprintf_chk calls. */
11336 static void
11337 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11339 tree dest, size, len, fmt, flag;
11340 const char *fmt_str;
11341 int nargs = call_expr_nargs (exp);
11343 /* Verify the required arguments in the original call. */
11345 if (nargs < 4)
11346 return;
11347 dest = CALL_EXPR_ARG (exp, 0);
11348 flag = CALL_EXPR_ARG (exp, 1);
11349 size = CALL_EXPR_ARG (exp, 2);
11350 fmt = CALL_EXPR_ARG (exp, 3);
11352 if (! host_integerp (size, 1) || integer_all_onesp (size))
11353 return;
11355 /* Check whether the format is a literal string constant. */
11356 fmt_str = c_getstr (fmt);
11357 if (fmt_str == NULL)
11358 return;
11360 if (!init_target_chars ())
11361 return;
11363 /* If the format doesn't contain % args or %%, we know its size. */
11364 if (strchr (fmt_str, target_percent) == 0)
11365 len = build_int_cstu (size_type_node, strlen (fmt_str));
11366 /* If the format is "%s" and first ... argument is a string literal,
11367 we know it too. */
11368 else if (fcode == BUILT_IN_SPRINTF_CHK
11369 && strcmp (fmt_str, target_percent_s) == 0)
11371 tree arg;
11373 if (nargs < 5)
11374 return;
11375 arg = CALL_EXPR_ARG (exp, 4);
11376 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11377 return;
11379 len = c_strlen (arg, 1);
11380 if (!len || ! host_integerp (len, 1))
11381 return;
11383 else
11384 return;
11386 if (! tree_int_cst_lt (len, size))
11388 location_t locus = EXPR_LOCATION (exp);
11389 warning (0, "%Hcall to %D will always overflow destination buffer",
11390 &locus, get_callee_fndecl (exp));
11394 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11395 if possible. */
11397 tree
11398 fold_builtin_object_size (tree ptr, tree ost)
11400 tree ret = NULL_TREE;
11401 int object_size_type;
11403 if (!validate_arg (ptr, POINTER_TYPE)
11404 || !validate_arg (ost, INTEGER_TYPE))
11405 return NULL_TREE;
11407 STRIP_NOPS (ost);
11409 if (TREE_CODE (ost) != INTEGER_CST
11410 || tree_int_cst_sgn (ost) < 0
11411 || compare_tree_int (ost, 3) > 0)
11412 return NULL_TREE;
11414 object_size_type = tree_low_cst (ost, 0);
11416 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11417 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11418 and (size_t) 0 for types 2 and 3. */
11419 if (TREE_SIDE_EFFECTS (ptr))
11420 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11422 if (TREE_CODE (ptr) == ADDR_EXPR)
11423 ret = build_int_cstu (size_type_node,
11424 compute_builtin_object_size (ptr, object_size_type));
11426 else if (TREE_CODE (ptr) == SSA_NAME)
11428 unsigned HOST_WIDE_INT bytes;
11430 /* If object size is not known yet, delay folding until
11431 later. Maybe subsequent passes will help determine
11432 it. */
11433 bytes = compute_builtin_object_size (ptr, object_size_type);
11434 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11435 ? -1 : 0))
11436 ret = build_int_cstu (size_type_node, bytes);
11439 if (ret)
11441 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11442 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11443 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11444 ret = NULL_TREE;
11447 return ret;
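/* For illustration of the side-effect rule above: the argument of
   __builtin_object_size is never evaluated, so

       char *q = buf;
       size_t s0 = __builtin_object_size (q++, 0);     folds to (size_t) -1
       size_t s2 = __builtin_object_size (q++, 2);     folds to 0

   and Q is not incremented by either statement.  */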
11450 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11451 DEST, SRC, LEN, and SIZE are the arguments to the call.
11452 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
11453 code of the builtin. If MAXLEN is not NULL, it is the maximum length
11454 passed as the third argument. */
11456 tree
11457 fold_builtin_memory_chk (tree fndecl,
11458 tree dest, tree src, tree len, tree size,
11459 tree maxlen, bool ignore,
11460 enum built_in_function fcode)
11462 tree fn;
11464 if (!validate_arg (dest, POINTER_TYPE)
11465 || !validate_arg (src,
11466 (fcode == BUILT_IN_MEMSET_CHK
11467 ? INTEGER_TYPE : POINTER_TYPE))
11468 || !validate_arg (len, INTEGER_TYPE)
11469 || !validate_arg (size, INTEGER_TYPE))
11470 return NULL_TREE;
11472 /* If SRC and DEST are the same (and not volatile), return DEST
11473 (resp. DEST+LEN for __mempcpy_chk). */
11474 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11476 if (fcode != BUILT_IN_MEMPCPY_CHK)
11477 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11478 else
11480 tree temp = fold_convert (TREE_TYPE (dest), len);
11481 temp = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, temp);
11482 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11486 if (! host_integerp (size, 1))
11487 return NULL_TREE;
11489 if (! integer_all_onesp (size))
11491 if (! host_integerp (len, 1))
11493 /* If LEN is not constant, try MAXLEN too.
11494 For MAXLEN only allow optimizing into non-_ocs function
11495 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11496 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11498 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11500 /* (void) __mempcpy_chk () can be optimized into
11501 (void) __memcpy_chk (). */
11502 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11503 if (!fn)
11504 return NULL_TREE;
11506 return build_call_expr (fn, 4, dest, src, len, size);
11508 return NULL_TREE;
11511 else
11512 maxlen = len;
11514 if (tree_int_cst_lt (size, maxlen))
11515 return NULL_TREE;
11518 fn = NULL_TREE;
11519 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11520 mem{cpy,pcpy,move,set} is available. */
11521 switch (fcode)
11523 case BUILT_IN_MEMCPY_CHK:
11524 fn = built_in_decls[BUILT_IN_MEMCPY];
11525 break;
11526 case BUILT_IN_MEMPCPY_CHK:
11527 fn = built_in_decls[BUILT_IN_MEMPCPY];
11528 break;
11529 case BUILT_IN_MEMMOVE_CHK:
11530 fn = built_in_decls[BUILT_IN_MEMMOVE];
11531 break;
11532 case BUILT_IN_MEMSET_CHK:
11533 fn = built_in_decls[BUILT_IN_MEMSET];
11534 break;
11535 default:
11536 break;
11539 if (!fn)
11540 return NULL_TREE;
11542 return build_call_expr (fn, 3, dest, src, len);
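/* A sketch of the source-equals-destination fold above (P, N and BOS are
   placeholders of the example):

       __builtin___memcpy_chk (p, p, n, bos)     folds to  p
       __builtin___memmove_chk (p, p, n, bos)    folds to  p
       __builtin___mempcpy_chk (p, p, n, bos)    folds to  p + n

   N itself is dropped; omit_one_operand keeps it only if it has side
   effects.  */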
11545 /* Fold a call to the __st[rp]cpy_chk builtin.
11546 DEST, SRC, and SIZE are the arguments to the call.
11547 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
11548 code of the builtin. If MAXLEN is not NULL, it is the maximum length of
11549 the string passed as the second argument. */
11551 tree
11552 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
11553 tree maxlen, bool ignore,
11554 enum built_in_function fcode)
11556 tree len, fn;
11558 if (!validate_arg (dest, POINTER_TYPE)
11559 || !validate_arg (src, POINTER_TYPE)
11560 || !validate_arg (size, INTEGER_TYPE))
11561 return NULL_TREE;
11563 /* If SRC and DEST are the same (and not volatile), return DEST. */
11564 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
11565 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
11567 if (! host_integerp (size, 1))
11568 return NULL_TREE;
11570 if (! integer_all_onesp (size))
11572 len = c_strlen (src, 1);
11573 if (! len || ! host_integerp (len, 1))
11575 /* If LEN is not constant, try MAXLEN too.
11576 For MAXLEN only allow optimizing into non-_ocs function
11577 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11578 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11580 if (fcode == BUILT_IN_STPCPY_CHK)
11582 if (! ignore)
11583 return NULL_TREE;
11585 /* If return value of __stpcpy_chk is ignored,
11586 optimize into __strcpy_chk. */
11587 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
11588 if (!fn)
11589 return NULL_TREE;
11591 return build_call_expr (fn, 3, dest, src, size);
11594 if (! len || TREE_SIDE_EFFECTS (len))
11595 return NULL_TREE;
11597 /* If c_strlen returned something, but not a constant,
11598 transform __strcpy_chk into __memcpy_chk. */
11599 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11600 if (!fn)
11601 return NULL_TREE;
11603 len = size_binop (PLUS_EXPR, len, ssize_int (1));
11604 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
11605 build_call_expr (fn, 4,
11606 dest, src, len, size));
11609 else
11610 maxlen = len;
11612 if (! tree_int_cst_lt (maxlen, size))
11613 return NULL_TREE;
11616 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
11617 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
11618 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
11619 if (!fn)
11620 return NULL_TREE;
11622 return build_call_expr (fn, 2, dest, src);
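/* Illustrative outcomes of the fold above (DST, SRC and BOS stand for the
   destination, source and compile-time object size):

       __builtin___strcpy_chk (dst, "hi", bos)
           -> strcpy (dst, "hi")              when 3 <= bos or bos is -1
       __builtin___strcpy_chk (dst, src, bos)
           -> __memcpy_chk (dst, src, len + 1, bos)
                                              when c_strlen gives LEN as a
                                              non-constant expression
       (void) __builtin___stpcpy_chk (dst, src, bos)
           -> __strcpy_chk (dst, src, bos)    when the length is unknown

   If the known length does not fit in BOS, the call is left alone so the
   checking routine in the library still runs.  */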
11625 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11626 are the arguments to the call. If MAXLEN is not NULL, it is the maximum
11627 length passed as the third argument. */
11629 tree
11630 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
11631 tree maxlen)
11633 tree fn;
11635 if (!validate_arg (dest, POINTER_TYPE)
11636 || !validate_arg (src, POINTER_TYPE)
11637 || !validate_arg (len, INTEGER_TYPE)
11638 || !validate_arg (size, INTEGER_TYPE))
11639 return NULL_TREE;
11641 if (! host_integerp (size, 1))
11642 return NULL_TREE;
11644 if (! integer_all_onesp (size))
11646 if (! host_integerp (len, 1))
11648 /* If LEN is not constant, try MAXLEN too.
11649 For MAXLEN only allow optimizing into non-_ocs function
11650 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11651 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11652 return NULL_TREE;
11654 else
11655 maxlen = len;
11657 if (tree_int_cst_lt (size, maxlen))
11658 return NULL_TREE;
11661 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11662 fn = built_in_decls[BUILT_IN_STRNCPY];
11663 if (!fn)
11664 return NULL_TREE;
11666 return build_call_expr (fn, 3, dest, src, len);
11669 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11670 are the arguments to the call. */
11672 static tree
11673 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
11675 tree fn;
11676 const char *p;
11678 if (!validate_arg (dest, POINTER_TYPE)
11679 || !validate_arg (src, POINTER_TYPE)
11680 || !validate_arg (size, INTEGER_TYPE))
11681 return NULL_TREE;
11683 p = c_getstr (src);
11684 /* If the SRC parameter is "", return DEST. */
11685 if (p && *p == '\0')
11686 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11688 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
11689 return NULL_TREE;
11691 /* If __builtin_strcat_chk is used, assume strcat is available. */
11692 fn = built_in_decls[BUILT_IN_STRCAT];
11693 if (!fn)
11694 return NULL_TREE;
11696 return build_call_expr (fn, 2, dest, src);
11699 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11700 LEN, and SIZE. */
11702 static tree
11703 fold_builtin_strncat_chk (tree fndecl,
11704 tree dest, tree src, tree len, tree size)
11706 tree fn;
11707 const char *p;
11709 if (!validate_arg (dest, POINTER_TYPE)
11710 || !validate_arg (src, POINTER_TYPE)
11711 || !validate_arg (len, INTEGER_TYPE)
11712 || !validate_arg (size, INTEGER_TYPE))
11713 return NULL_TREE;
11715 p = c_getstr (src);
11716 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11717 if (p && *p == '\0')
11718 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11719 else if (integer_zerop (len))
11720 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11722 if (! host_integerp (size, 1))
11723 return NULL_TREE;
11725 if (! integer_all_onesp (size))
11727 tree src_len = c_strlen (src, 1);
11728 if (src_len
11729 && host_integerp (src_len, 1)
11730 && host_integerp (len, 1)
11731 && ! tree_int_cst_lt (len, src_len))
11733 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
11734 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
11735 if (!fn)
11736 return NULL_TREE;
11738 return build_call_expr (fn, 3, dest, src, size);
11740 return NULL_TREE;
11743 /* If __builtin_strncat_chk is used, assume strncat is available. */
11744 fn = built_in_decls[BUILT_IN_STRNCAT];
11745 if (!fn)
11746 return NULL_TREE;
11748 return build_call_expr (fn, 3, dest, src, len);
11751 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
11752 a normal call should be emitted rather than expanding the function
11753 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
11755 static tree
11756 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
11758 tree dest, size, len, fn, fmt, flag;
11759 const char *fmt_str;
11760 int nargs = call_expr_nargs (exp);
11762 /* Verify the required arguments in the original call. */
11763 if (nargs < 4)
11764 return NULL_TREE;
11765 dest = CALL_EXPR_ARG (exp, 0);
11766 if (!validate_arg (dest, POINTER_TYPE))
11767 return NULL_TREE;
11768 flag = CALL_EXPR_ARG (exp, 1);
11769 if (!validate_arg (flag, INTEGER_TYPE))
11770 return NULL_TREE;
11771 size = CALL_EXPR_ARG (exp, 2);
11772 if (!validate_arg (size, INTEGER_TYPE))
11773 return NULL_TREE;
11774 fmt = CALL_EXPR_ARG (exp, 3);
11775 if (!validate_arg (fmt, POINTER_TYPE))
11776 return NULL_TREE;
11778 if (! host_integerp (size, 1))
11779 return NULL_TREE;
11781 len = NULL_TREE;
11783 if (!init_target_chars ())
11784 return NULL_TREE;
11786 /* Check whether the format is a literal string constant. */
11787 fmt_str = c_getstr (fmt);
11788 if (fmt_str != NULL)
11790 /* If the format doesn't contain % args or %%, we know the size. */
11791 if (strchr (fmt_str, target_percent) == 0)
11793 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
11794 len = build_int_cstu (size_type_node, strlen (fmt_str));
11796 /* If the format is "%s" and first ... argument is a string literal,
11797 we know the size too. */
11798 else if (fcode == BUILT_IN_SPRINTF_CHK
11799 && strcmp (fmt_str, target_percent_s) == 0)
11801 tree arg;
11803 if (nargs == 5)
11805 arg = CALL_EXPR_ARG (exp, 4);
11806 if (validate_arg (arg, POINTER_TYPE))
11808 len = c_strlen (arg, 1);
11809 if (! len || ! host_integerp (len, 1))
11810 len = NULL_TREE;
11816 if (! integer_all_onesp (size))
11818 if (! len || ! tree_int_cst_lt (len, size))
11819 return NULL_TREE;
11822 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
11823 or if format doesn't contain % chars or is "%s". */
11824 if (! integer_zerop (flag))
11826 if (fmt_str == NULL)
11827 return NULL_TREE;
11828 if (strchr (fmt_str, target_percent) != NULL
11829 && strcmp (fmt_str, target_percent_s))
11830 return NULL_TREE;
11833 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
11834 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
11835 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
11836 if (!fn)
11837 return NULL_TREE;
11839 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
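/* For illustration, with FLAG equal to 0 the fold above gives:

       __builtin___sprintf_chk (buf, 0, bos, "hello")
           -> sprintf (buf, "hello")          when 5 < bos or bos is -1
       __builtin___sprintf_chk (buf, 0, bos, "%s", src)
           -> sprintf (buf, "%s", src)        when strlen (src) is a known
                                              constant smaller than bos

   rewrite_call_expr keeps any further variadic arguments of the original
   call in place.  */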
11842 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
11843 a normal call should be emitted rather than expanding the function
11844 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
11845 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
11846 passed as the second argument. */
11848 tree
11849 fold_builtin_snprintf_chk (tree exp, tree maxlen,
11850 enum built_in_function fcode)
11852 tree dest, size, len, fn, fmt, flag;
11853 const char *fmt_str;
11855 /* Verify the required arguments in the original call. */
11856 if (call_expr_nargs (exp) < 5)
11857 return NULL_TREE;
11858 dest = CALL_EXPR_ARG (exp, 0);
11859 if (!validate_arg (dest, POINTER_TYPE))
11860 return NULL_TREE;
11861 len = CALL_EXPR_ARG (exp, 1);
11862 if (!validate_arg (len, INTEGER_TYPE))
11863 return NULL_TREE;
11864 flag = CALL_EXPR_ARG (exp, 2);
11865 if (!validate_arg (flag, INTEGER_TYPE))
11866 return NULL_TREE;
11867 size = CALL_EXPR_ARG (exp, 3);
11868 if (!validate_arg (size, INTEGER_TYPE))
11869 return NULL_TREE;
11870 fmt = CALL_EXPR_ARG (exp, 4);
11871 if (!validate_arg (fmt, POINTER_TYPE))
11872 return NULL_TREE;
11874 if (! host_integerp (size, 1))
11875 return NULL_TREE;
11877 if (! integer_all_onesp (size))
11879 if (! host_integerp (len, 1))
11881 /* If LEN is not constant, try MAXLEN too.
11882 For MAXLEN only allow optimizing into non-_ocs function
11883 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11884 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11885 return NULL_TREE;
11887 else
11888 maxlen = len;
11890 if (tree_int_cst_lt (size, maxlen))
11891 return NULL_TREE;
11894 if (!init_target_chars ())
11895 return NULL_TREE;
11897 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
11898 or if format doesn't contain % chars or is "%s". */
11899 if (! integer_zerop (flag))
11901 fmt_str = c_getstr (fmt);
11902 if (fmt_str == NULL)
11903 return NULL_TREE;
11904 if (strchr (fmt_str, target_percent) != NULL
11905 && strcmp (fmt_str, target_percent_s))
11906 return NULL_TREE;
11909 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
11910 available. */
11911 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
11912 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
11913 if (!fn)
11914 return NULL_TREE;
11916 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
11919 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
11920 FMT and ARG are the arguments to the call; we don't fold cases with
11921 more than 2 arguments, and ARG may be null if this is a 1-argument case.
11923 Return NULL_TREE if no simplification was possible, otherwise return the
11924 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11925 code of the function to be simplified. */
11927 static tree
11928 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
11929 enum built_in_function fcode)
11931 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
11932 const char *fmt_str = NULL;
11934 /* If the return value is used, don't do the transformation. */
11935 if (! ignore)
11936 return NULL_TREE;
11938 /* Verify the required arguments in the original call. */
11939 if (!validate_arg (fmt, POINTER_TYPE))
11940 return NULL_TREE;
11942 /* Check whether the format is a literal string constant. */
11943 fmt_str = c_getstr (fmt);
11944 if (fmt_str == NULL)
11945 return NULL_TREE;
11947 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
11949 /* If we're using an unlocked function, assume the other
11950 unlocked functions exist explicitly. */
11951 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
11952 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
11954 else
11956 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
11957 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
11960 if (!init_target_chars ())
11961 return NULL_TREE;
11963 if (strcmp (fmt_str, target_percent_s) == 0
11964 || strchr (fmt_str, target_percent) == NULL)
11966 const char *str;
11968 if (strcmp (fmt_str, target_percent_s) == 0)
11970 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11971 return NULL_TREE;
11973 if (!arg || !validate_arg (arg, POINTER_TYPE))
11974 return NULL_TREE;
11976 str = c_getstr (arg);
11977 if (str == NULL)
11978 return NULL_TREE;
11980 else
11982 /* The format specifier doesn't contain any '%' characters. */
11983 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
11984 && arg)
11985 return NULL_TREE;
11986 str = fmt_str;
11989 /* If the string was "", printf does nothing. */
11990 if (str[0] == '\0')
11991 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11993 /* If the string has length of 1, call putchar. */
11994 if (str[1] == '\0')
11996 /* Given printf ("c") (where c is any one character),
11997 convert "c"[0] to an int and pass that to the replacement
11998 function. */
11999 newarg = build_int_cst (NULL_TREE, str[0]);
12000 if (fn_putchar)
12001 call = build_call_expr (fn_putchar, 1, newarg);
12003 else
12005 /* If the string was "string\n", call puts("string"). */
12006 size_t len = strlen (str);
12007 if ((unsigned char)str[len - 1] == target_newline)
12009 /* Create a NUL-terminated string that's one char shorter
12010 than the original, stripping off the trailing '\n'. */
12011 char *newstr = alloca (len);
12012 memcpy (newstr, str, len - 1);
12013 newstr[len - 1] = 0;
12015 newarg = build_string_literal (len, newstr);
12016 if (fn_puts)
12017 call = build_call_expr (fn_puts, 1, newarg);
12019 else
12020 /* We'd like to arrange to call fputs(string,stdout) here,
12021 but we need stdout and don't have a way to get it yet. */
12022 return NULL_TREE;
12026 /* The other optimizations can be done only on the non-va_list variants. */
12027 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12028 return NULL_TREE;
12030 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12031 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12033 if (!arg || !validate_arg (arg, POINTER_TYPE))
12034 return NULL_TREE;
12035 if (fn_puts)
12036 call = build_call_expr (fn_puts, 1, arg);
12039 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12040 else if (strcmp (fmt_str, target_percent_c) == 0)
12042 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12043 return NULL_TREE;
12044 if (fn_putchar)
12045 call = build_call_expr (fn_putchar, 1, arg);
12048 if (!call)
12049 return NULL_TREE;
12051 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
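/* Illustrative results of the printf folding above (only when the value of
   the call is not used):

       printf ("");             -> folded away (constant 0)
       printf ("x");            -> putchar ('x')
       printf ("hello\n");      -> puts ("hello")
       printf ("%s\n", str);    -> puts (str)
       printf ("%c", c);        -> putchar (c)
       printf ("%s", "hi\n");   -> puts ("hi")   (string literal argument)

   printf ("hello") with no trailing newline is left alone, since there is
   no way to name stdout here and call fputs ("hello", stdout) instead.  */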
12054 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12055 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12056 more than 3 arguments, and ARG may be null in the 2-argument case.
12058 Return NULL_TREE if no simplification was possible, otherwise return the
12059 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12060 code of the function to be simplified. */
12062 static tree
12063 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12064 enum built_in_function fcode)
12066 tree fn_fputc, fn_fputs, call = NULL_TREE;
12067 const char *fmt_str = NULL;
12069 /* If the return value is used, don't do the transformation. */
12070 if (! ignore)
12071 return NULL_TREE;
12073 /* Verify the required arguments in the original call. */
12074 if (!validate_arg (fp, POINTER_TYPE))
12075 return NULL_TREE;
12076 if (!validate_arg (fmt, POINTER_TYPE))
12077 return NULL_TREE;
12079 /* Check whether the format is a literal string constant. */
12080 fmt_str = c_getstr (fmt);
12081 if (fmt_str == NULL)
12082 return NULL_TREE;
12084 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12086 /* If we're using an unlocked function, assume the other
12087 unlocked functions exist explicitly. */
12088 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12089 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12091 else
12093 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12094 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12097 if (!init_target_chars ())
12098 return NULL_TREE;
12100 /* If the format doesn't contain % args or %%, use fputs. */
12101 if (strchr (fmt_str, target_percent) == NULL)
12103 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12104 && arg)
12105 return NULL_TREE;
12107 /* If the format specifier was "", fprintf does nothing. */
12108 if (fmt_str[0] == '\0')
12110 /* If FP has side-effects, just wait until gimplification is
12111 done. */
12112 if (TREE_SIDE_EFFECTS (fp))
12113 return NULL_TREE;
12115 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12118 /* When "string" doesn't contain %, replace all cases of
12119 fprintf (fp, string) with fputs (string, fp). The fputs
12120 builtin will take care of special cases like length == 1. */
12121 if (fn_fputs)
12122 call = build_call_expr (fn_fputs, 2, fmt, fp);
12125 /* The other optimizations can be done only on the non-va_list variants. */
12126 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12127 return NULL_TREE;
12129 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12130 else if (strcmp (fmt_str, target_percent_s) == 0)
12132 if (!arg || !validate_arg (arg, POINTER_TYPE))
12133 return NULL_TREE;
12134 if (fn_fputs)
12135 call = build_call_expr (fn_fputs, 2, arg, fp);
12138 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12139 else if (strcmp (fmt_str, target_percent_c) == 0)
12141 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12142 return NULL_TREE;
12143 if (fn_fputc)
12144 call = build_call_expr (fn_fputc, 2, arg, fp);
12147 if (!call)
12148 return NULL_TREE;
12149 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
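/* The fprintf folding above mirrors the printf case, with the stream moved
   to the last argument of the replacement; for illustration:

       fprintf (fp, "hello");    -> fputs ("hello", fp)
       fprintf (fp, "%s", str);  -> fputs (str, fp)
       fprintf (fp, "%c", c);    -> fputc (c, fp)
       fprintf (fp, "");         -> folded away (constant 0), provided FP
                                    has no side effects

   Again this only happens when the call's return value is ignored.  */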
12152 /* Initialize format string characters in the target charset. */
12154 static bool
12155 init_target_chars (void)
12157 static bool init;
12158 if (!init)
12160 target_newline = lang_hooks.to_target_charset ('\n');
12161 target_percent = lang_hooks.to_target_charset ('%');
12162 target_c = lang_hooks.to_target_charset ('c');
12163 target_s = lang_hooks.to_target_charset ('s');
12164 if (target_newline == 0 || target_percent == 0 || target_c == 0
12165 || target_s == 0)
12166 return false;
12168 target_percent_c[0] = target_percent;
12169 target_percent_c[1] = target_c;
12170 target_percent_c[2] = '\0';
12172 target_percent_s[0] = target_percent;
12173 target_percent_s[1] = target_s;
12174 target_percent_s[2] = '\0';
12176 target_percent_s_newline[0] = target_percent;
12177 target_percent_s_newline[1] = target_s;
12178 target_percent_s_newline[2] = target_newline;
12179 target_percent_s_newline[3] = '\0';
12181 init = true;
12183 return true;
12186 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12187 and no overflow/underflow occurred. INEXACT is true if M was not
12188 exactly calculated. TYPE is the tree type for the result. This
12189 function assumes that you cleared the MPFR flags and then
12190 calculated M, so that any flag set during that calculation can be
12191 checked on entry to this function. Return NULL_TREE if any checks fail. */
12193 static tree
12194 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12196 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12197 overflow/underflow occurred. If -frounding-math, proceed iff the
12198 result of calling FUNC was exact. */
12199 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12200 && (!flag_rounding_math || !inexact))
12202 REAL_VALUE_TYPE rr;
12204 real_from_mpfr (&rr, m);
12205 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12206 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12207 but the mpfr_t is not, then we underflowed in the
12208 conversion. */
12209 if (!real_isnan (&rr) && !real_isinf (&rr)
12210 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12212 REAL_VALUE_TYPE rmode;
12214 real_convert (&rmode, TYPE_MODE (type), &rr);
12215 /* Proceed iff the specified mode can hold the value. */
12216 if (real_identical (&rmode, &rr))
12217 return build_real (type, rmode);
12220 return NULL_TREE;
12223 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12224 FUNC on it and return the resulting value as a tree with type TYPE.
12225 If MIN and/or MAX are not NULL, then the supplied ARG must be
12226 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12227 acceptable values, otherwise they are not. The mpfr precision is
12228 set to the precision of TYPE. We assume that function FUNC returns
12229 zero if the result could be calculated exactly within the requested
12230 precision. */
12232 static tree
12233 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12234 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12235 bool inclusive)
12237 tree result = NULL_TREE;
12239 STRIP_NOPS (arg);
12241 /* To proceed, MPFR must exactly represent the target floating point
12242 format, which only happens when the target base equals two. */
12243 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12244 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12246 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12248 if (!real_isnan (ra) && !real_isinf (ra)
12249 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12250 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12252 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12253 int inexact;
12254 mpfr_t m;
12256 mpfr_init2 (m, prec);
12257 mpfr_from_real (m, ra);
12258 mpfr_clear_flags ();
12259 inexact = func (m, m, GMP_RNDN);
12260 result = do_mpfr_ckconv (m, type, inexact);
12261 mpfr_clear (m);
12265 return result;
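/* Outside the compiler, the clear-flags / evaluate / validate protocol used
   by do_mpfr_arg1 and do_mpfr_ckconv looks roughly like the sketch below;
   the function name, the precision (53) and the test value are arbitrary
   choices of the example:

       #include <mpfr.h>

       static int
       mpfr_fold_sketch (void)
       {
         mpfr_t m;
         int inexact, ok;

         mpfr_init2 (m, 53);              // precision of the result type
         mpfr_set_d (m, 0.5, GMP_RNDN);   // the constant argument
         mpfr_clear_flags ();             // so the flag tests below are valid
         inexact = mpfr_sin (m, m, GMP_RNDN);
         // mirror do_mpfr_ckconv: a normal number, no overflow/underflow,
         // and (as required under -frounding-math) an exact result
         ok = (mpfr_number_p (m) && !mpfr_overflow_p ()
               && !mpfr_underflow_p () && !inexact);
         mpfr_clear (m);
         return ok;
       }  */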
12268 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12269 FUNC on it and return the resulting value as a tree with type TYPE.
12270 The mpfr precision is set to the precision of TYPE. We assume that
12271 function FUNC returns zero if the result could be calculated
12272 exactly within the requested precision. */
12274 static tree
12275 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12276 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12278 tree result = NULL_TREE;
12280 STRIP_NOPS (arg1);
12281 STRIP_NOPS (arg2);
12283 /* To proceed, MPFR must exactly represent the target floating point
12284 format, which only happens when the target base equals two. */
12285 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12286 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12287 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12289 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12290 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12292 if (!real_isnan (ra1) && !real_isinf (ra1)
12293 && !real_isnan (ra2) && !real_isinf (ra2))
12295 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12296 int inexact;
12297 mpfr_t m1, m2;
12299 mpfr_inits2 (prec, m1, m2, NULL);
12300 mpfr_from_real (m1, ra1);
12301 mpfr_from_real (m2, ra2);
12302 mpfr_clear_flags ();
12303 inexact = func (m1, m1, m2, GMP_RNDN);
12304 result = do_mpfr_ckconv (m1, type, inexact);
12305 mpfr_clears (m1, m2, NULL);
12309 return result;
12312 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12313 FUNC on it and return the resulting value as a tree with type TYPE.
12314 The mpfr precision is set to the precision of TYPE. We assume that
12315 function FUNC returns zero if the result could be calculated
12316 exactly within the requested precision. */
12318 static tree
12319 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12320 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12322 tree result = NULL_TREE;
12324 STRIP_NOPS (arg1);
12325 STRIP_NOPS (arg2);
12326 STRIP_NOPS (arg3);
12328 /* To proceed, MPFR must exactly represent the target floating point
12329 format, which only happens when the target base equals two. */
12330 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12331 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12332 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12333 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12335 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12336 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12337 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12339 if (!real_isnan (ra1) && !real_isinf (ra1)
12340 && !real_isnan (ra2) && !real_isinf (ra2)
12341 && !real_isnan (ra3) && !real_isinf (ra3))
12343 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12344 int inexact;
12345 mpfr_t m1, m2, m3;
12347 mpfr_inits2 (prec, m1, m2, m3, NULL);
12348 mpfr_from_real (m1, ra1);
12349 mpfr_from_real (m2, ra2);
12350 mpfr_from_real (m3, ra3);
12351 mpfr_clear_flags ();
12352 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12353 result = do_mpfr_ckconv (m1, type, inexact);
12354 mpfr_clears (m1, m2, m3, NULL);
12358 return result;
12361 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12362 the objects pointed to by ARG_SINP and ARG_COSP to the resulting values.
12363 If ARG_SINP and ARG_COSP are NULL then the result is returned
12364 as a complex value.
12365 The type is taken from the type of ARG and is used for setting the
12366 precision of the calculation and results. */
12368 static tree
12369 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12371 tree const type = TREE_TYPE (arg);
12372 tree result = NULL_TREE;
12374 STRIP_NOPS (arg);
12376 /* To proceed, MPFR must exactly represent the target floating point
12377 format, which only happens when the target base equals two. */
12378 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12379 && TREE_CODE (arg) == REAL_CST
12380 && !TREE_OVERFLOW (arg))
12382 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12384 if (!real_isnan (ra) && !real_isinf (ra))
12386 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12387 tree result_s, result_c;
12388 int inexact;
12389 mpfr_t m, ms, mc;
12391 mpfr_inits2 (prec, m, ms, mc, NULL);
12392 mpfr_from_real (m, ra);
12393 mpfr_clear_flags ();
12394 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12395 result_s = do_mpfr_ckconv (ms, type, inexact);
12396 result_c = do_mpfr_ckconv (mc, type, inexact);
12397 mpfr_clears (m, ms, mc, NULL);
12398 if (result_s && result_c)
12400 /* If we are to return the result as a complex value, do so. */
12401 if (!arg_sinp && !arg_cosp)
12402 return build_complex (build_complex_type (type),
12403 result_c, result_s);
12405 /* Dereference the sin/cos pointer arguments. */
12406 arg_sinp = build_fold_indirect_ref (arg_sinp);
12407 arg_cosp = build_fold_indirect_ref (arg_cosp);
12408 /* Proceed if valid pointer types were passed in. */
12409 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12410 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12412 /* Set the values. */
12413 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12414 result_s);
12415 TREE_SIDE_EFFECTS (result_s) = 1;
12416 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12417 result_c);
12418 TREE_SIDE_EFFECTS (result_c) = 1;
12419 /* Combine the assignments into a compound expr. */
12420 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12421 result_s, result_c));
12426 return result;