/* Expand builtin functions.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "tree-gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "value-prof.h"

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
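
/* With the DEF_BUILTIN definition above, each entry of builtins.def
   contributes its stringified enumerator name, so built_in_names is
   indexed directly by the corresponding enum built_in_function value.  */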

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];

static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_bcopy (tree, int);
static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, rtx);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static rtx expand_builtin_fputs (tree, rtx, bool);
static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
static tree stabilize_va_list (tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (tree);
static tree fold_builtin_inf (tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (tree, int, tree, int, ...);
static bool validate_arg (tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (tree, tree);
static tree fold_builtin_cbrt (tree, tree);
static tree fold_builtin_pow (tree, tree, tree, tree);
static tree fold_builtin_powi (tree, tree, tree, tree);
static tree fold_builtin_cos (tree, tree, tree);
static tree fold_builtin_cosh (tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (tree, tree);
static tree fold_builtin_floor (tree, tree);
static tree fold_builtin_ceil (tree, tree);
static tree fold_builtin_round (tree, tree);
static tree fold_builtin_int_roundingfn (tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (tree, tree, tree);
static tree fold_builtin_memcmp (tree, tree, tree);
static tree fold_builtin_strcmp (tree, tree);
static tree fold_builtin_strncmp (tree, tree, tree);
static tree fold_builtin_signbit (tree, tree);
static tree fold_builtin_copysign (tree, tree, tree, tree);
static tree fold_builtin_isascii (tree);
static tree fold_builtin_toascii (tree);
static tree fold_builtin_isdigit (tree);
static tree fold_builtin_fabs (tree, tree);
static tree fold_builtin_abs (tree, tree);
static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_n (tree, tree *, int, bool);
static tree fold_builtin_0 (tree, bool);
static tree fold_builtin_1 (tree, tree, bool);
static tree fold_builtin_2 (tree, tree, tree, bool);
static tree fold_builtin_3 (tree, tree, tree, tree, bool);
static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (tree, tree, bool);

static tree fold_builtin_strpbrk (tree, tree, tree);
static tree fold_builtin_strstr (tree, tree, tree);
static tree fold_builtin_strrchr (tree, tree, tree);
static tree fold_builtin_strcat (tree, tree);
static tree fold_builtin_strncat (tree, tree, tree);
static tree fold_builtin_strspn (tree, tree);
static tree fold_builtin_strcspn (tree, tree);
static tree fold_builtin_sprintf (tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
                                  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool called_as_built_in (tree node)
{
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}
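
/* For example, calls spelled __builtin_memcpy or __sync_fetch_and_add are
   recognized here, while a plain memcpy call is not.  */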

/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* We rely on TER to compute accurate alignment information.  */
  if (!(optimize && flag_tree_ter))
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case NON_LVALUE_EXPR:
          exp = TREE_OPERAND (exp, 0);
          if (! POINTER_TYPE_P (TREE_TYPE (exp)))
            return align;

          inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
          align = MIN (inner, max_align);
          break;

        case PLUS_EXPR:
          /* If sum of pointer + int, restrict our maximum alignment to that
             imposed by the integer.  If not, we can't do any better than
             ALIGN.  */
          if (! host_integerp (TREE_OPERAND (exp, 1), 1))
            return align;

          while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
                  & (max_align / BITS_PER_UNIT - 1))
                 != 0)
            max_align >>= 1;

          exp = TREE_OPERAND (exp, 0);
          break;

        case ADDR_EXPR:
          /* See what we are pointing at and look at its alignment.  */
          exp = TREE_OPERAND (exp, 0);
          inner = max_align;
          if (handled_component_p (exp))
            {
              HOST_WIDE_INT bitsize, bitpos;
              tree offset;
              enum machine_mode mode;
              int unsignedp, volatilep;

              exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                         &mode, &unsignedp, &volatilep, true);
              if (bitpos)
                inner = MIN (inner, (unsigned) (bitpos & -bitpos));
              if (offset && TREE_CODE (offset) == PLUS_EXPR
                  && host_integerp (TREE_OPERAND (offset, 1), 1))
                {
                  /* Any overflow in calculating offset_bits won't change
                     the alignment.  */
                  unsigned offset_bits
                    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
                       * BITS_PER_UNIT);

                  if (offset_bits)
                    inner = MIN (inner, (offset_bits & -offset_bits));
                  offset = TREE_OPERAND (offset, 0);
                }
              if (offset && TREE_CODE (offset) == MULT_EXPR
                  && host_integerp (TREE_OPERAND (offset, 1), 1))
                {
                  /* Any overflow in calculating offset_factor won't change
                     the alignment.  */
                  unsigned offset_factor
                    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
                       * BITS_PER_UNIT);

                  if (offset_factor)
                    inner = MIN (inner, (offset_factor & -offset_factor));
                }
              else if (offset)
                inner = MIN (inner, BITS_PER_UNIT);
            }
          if (TREE_CODE (exp) == FUNCTION_DECL)
            align = FUNCTION_BOUNDARY;
          else if (DECL_P (exp))
            align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
          else if (CONSTANT_CLASS_P (exp))
            align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
          else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
                   || TREE_CODE (exp) == INDIRECT_REF)
            align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
          else
            align = MIN (align, inner);
          return MIN (align, max_align);

        default:
          return align;
        }
    }
}
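
/* As an illustration, taking the address of a field that starts two bytes
   into an 8-byte-aligned local structure reaches the ADDR_EXPR case above
   with a bit position of 16, so INNER drops to 16 and the alignment
   reported is 16 bits (subject to MAX_ALIGN).  */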

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning (0, "offset outside bounds of constant string");
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
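
/* For instance, for the string constant "foobar" with a known constant
   offset of 3, the search above finds the terminating null after "bar"
   and ssize_int (3) is returned.  */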

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}
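
/* For instance, an argument that folds to the string constant "hello" plus
   a constant offset of 2 yields a pointer to the tail "llo", while an
   offset beyond the end of the constant yields 0.  */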

/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
        ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  return immed_double_const (c[0], c[1], mode);
}

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (!host_integerp (cst, 1)
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = tree_low_cst (cst, 1);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
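
/* For instance, on a target whose char has the same width as the host char,
   a constant such as 'a' is stored in *P and 0 is returned; a constant that
   does not fit in a host char makes the function return 1 instead.  */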

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_ADDRESSABLE (exp) == 0
      && (TREE_CODE (exp) == PARM_DECL
          || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      current_function_accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static HOST_WIDE_INT setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Tell optimize_save_area_alloca that extra work is going to
     need to go on during alloca.  */
  current_function_calls_setjmp = 1;

  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
         apparent to early optimization passes, so force a clobber.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (virtual_incoming_args_rtx,
                          copy_to_reg (get_arg_pointer_save_area (cfun)));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
     insn.  */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                      gen_rtx_MEM (BLKmode,
                                                   gen_rtx_SCRATCH (VOIDmode))));
          emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                      gen_rtx_MEM (BLKmode,
                                                   hard_frame_pointer_rtx)));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

          emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
                                              REG_NOTES (insn));
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  current_function_has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                  gen_rtx_MEM (BLKmode,
                                               gen_rtx_SCRATCH (VOIDmode))));

      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                  gen_rtx_MEM (BLKmode,
                                               hard_frame_pointer_rtx)));

      /* Restore frame pointer for containing function.
         This sets the actual hard register used for the frame pointer
         to the location of the function's incoming static chain info.
         The non-local goto handler will then adjust it to contain the
         proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
      emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
                                              const0_rtx, REG_NOTES (insn));
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;

#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = build_int_cst (NULL_TREE, 3);

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
             (op0,
              insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
          || (GET_MODE (op0) != Pmode))
        {
          op0 = convert_memory_address (Pmode, op0);
          op0 = force_reg (Pmode, op0);
        }
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
          || TREE_CODE (exp) == NON_LVALUE_EXPR)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      /* Allow the string and memory builtins to overflow from one
         field into another, see http://gcc.gnu.org/PR23561.
         Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
         memory accessed by the string or memory builtin will fit
         within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
        {
          tree mem_expr = MEM_EXPR (mem);
          HOST_WIDE_INT offset = -1, length = -1;
          tree inner = exp;

          while (TREE_CODE (inner) == ARRAY_REF
                 || TREE_CODE (inner) == NOP_EXPR
                 || TREE_CODE (inner) == CONVERT_EXPR
                 || TREE_CODE (inner) == NON_LVALUE_EXPR
                 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
                 || TREE_CODE (inner) == SAVE_EXPR)
            inner = TREE_OPERAND (inner, 0);

          gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

          if (MEM_OFFSET (mem)
              && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
            offset = INTVAL (MEM_OFFSET (mem));

          if (offset >= 0 && len && host_integerp (len, 0))
            length = tree_low_cst (len, 0);

          while (TREE_CODE (inner) == COMPONENT_REF)
            {
              tree field = TREE_OPERAND (inner, 1);
              gcc_assert (! DECL_BIT_FIELD (field));
              gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
              gcc_assert (field == TREE_OPERAND (mem_expr, 1));

              if (length >= 0
                  && TYPE_SIZE_UNIT (TREE_TYPE (inner))
                  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
                {
                  HOST_WIDE_INT size
                    = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
                  /* If we can prove the memory starting at XEXP (mem, 0)
                     and ending at XEXP (mem, 0) + LENGTH will fit into
                     this field, we can keep that COMPONENT_REF in MEM_EXPR.  */
                  if (offset <= size
                      && length <= size
                      && offset + length <= size)
                    break;
                }

              if (offset >= 0
                  && host_integerp (DECL_FIELD_OFFSET (field), 0))
                offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
                          + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                            / BITS_PER_UNIT;
              else
                {
                  offset = -1;
                  length = -1;
                }

              mem_expr = TREE_OPERAND (mem_expr, 0);
              inner = TREE_OPERAND (inner, 0);
            }

          if (mem_expr == NULL)
            offset = -1;
          if (mem_expr != MEM_EXPR (mem))
            {
              set_mem_expr (mem, mem_expr);
              set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
            }
        }
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = reg_raw_mode[regno];

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            apply_args_reg_offset[regno] = size;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
            apply_args_reg_offset[regno] = 0;
          }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_VALUE_REGNO_P (regno))
          {
            mode = reg_raw_mode[regno];

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (virtual_incoming_args_rtx);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, current_function_pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  */
    push_topmost_sequence ();
    emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (GET_CODE (argsize) == CONST_INT)
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            gcc_assert (!valreg); /* HAVE_untyped_call required.  */

            valreg = gen_rtx_REG (mode, regno);
          }

      emit_call_insn (GEN_CALL_VALUE (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}

/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
        emit_move_insn (reg, adjust_address (result, mode, size));

        push_to_sequence (call_fusage);
        emit_insn (gen_rtx_USE (VOIDmode, reg));
        call_fusage = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}

/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:        return void_type_class;
    case INTEGER_TYPE:     return integer_type_class;
    case ENUMERAL_TYPE:    return enumeral_type_class;
    case BOOLEAN_TYPE:     return boolean_type_class;
    case POINTER_TYPE:     return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:      return offset_type_class;
    case REAL_TYPE:        return real_type_class;
    case COMPLEX_TYPE:     return complex_type_class;
    case FUNCTION_TYPE:    return function_type_class;
    case METHOD_TYPE:      return method_type_class;
    case RECORD_TYPE:      return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
                                   ? string_type_class : array_type_class);
    case LANG_TYPE:        return lang_type_class;
    default:               return no_type_class;
    }
}

/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
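
/* For example, a call such as __builtin_classify_type (1.5) expands to the
   constant real_type_class, and a call with no arguments yields
   no_type_class.  */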

/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
1649 /* Return the mathematical function equivalent to FN but operating directly
1650 on TYPE, if available. If we can't do the conversion, return NULL_TREE. */
1651 tree
1652 mathfn_built_in (tree type, enum built_in_function fn)
1654 enum built_in_function fcode, fcodef, fcodel;
1656 switch (fn)
1658 CASE_MATHFN (BUILT_IN_ACOS)
1659 CASE_MATHFN (BUILT_IN_ACOSH)
1660 CASE_MATHFN (BUILT_IN_ASIN)
1661 CASE_MATHFN (BUILT_IN_ASINH)
1662 CASE_MATHFN (BUILT_IN_ATAN)
1663 CASE_MATHFN (BUILT_IN_ATAN2)
1664 CASE_MATHFN (BUILT_IN_ATANH)
1665 CASE_MATHFN (BUILT_IN_CBRT)
1666 CASE_MATHFN (BUILT_IN_CEIL)
1667 CASE_MATHFN (BUILT_IN_CEXPI)
1668 CASE_MATHFN (BUILT_IN_COPYSIGN)
1669 CASE_MATHFN (BUILT_IN_COS)
1670 CASE_MATHFN (BUILT_IN_COSH)
1671 CASE_MATHFN (BUILT_IN_DREM)
1672 CASE_MATHFN (BUILT_IN_ERF)
1673 CASE_MATHFN (BUILT_IN_ERFC)
1674 CASE_MATHFN (BUILT_IN_EXP)
1675 CASE_MATHFN (BUILT_IN_EXP10)
1676 CASE_MATHFN (BUILT_IN_EXP2)
1677 CASE_MATHFN (BUILT_IN_EXPM1)
1678 CASE_MATHFN (BUILT_IN_FABS)
1679 CASE_MATHFN (BUILT_IN_FDIM)
1680 CASE_MATHFN (BUILT_IN_FLOOR)
1681 CASE_MATHFN (BUILT_IN_FMA)
1682 CASE_MATHFN (BUILT_IN_FMAX)
1683 CASE_MATHFN (BUILT_IN_FMIN)
1684 CASE_MATHFN (BUILT_IN_FMOD)
1685 CASE_MATHFN (BUILT_IN_FREXP)
1686 CASE_MATHFN (BUILT_IN_GAMMA)
1687 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1688 CASE_MATHFN (BUILT_IN_HYPOT)
1689 CASE_MATHFN (BUILT_IN_ILOGB)
1690 CASE_MATHFN (BUILT_IN_INF)
1691 CASE_MATHFN (BUILT_IN_ISINF)
1692 CASE_MATHFN (BUILT_IN_J0)
1693 CASE_MATHFN (BUILT_IN_J1)
1694 CASE_MATHFN (BUILT_IN_JN)
1695 CASE_MATHFN (BUILT_IN_LCEIL)
1696 CASE_MATHFN (BUILT_IN_LDEXP)
1697 CASE_MATHFN (BUILT_IN_LFLOOR)
1698 CASE_MATHFN (BUILT_IN_LGAMMA)
1699 CASE_MATHFN (BUILT_IN_LLCEIL)
1700 CASE_MATHFN (BUILT_IN_LLFLOOR)
1701 CASE_MATHFN (BUILT_IN_LLRINT)
1702 CASE_MATHFN (BUILT_IN_LLROUND)
1703 CASE_MATHFN (BUILT_IN_LOG)
1704 CASE_MATHFN (BUILT_IN_LOG10)
1705 CASE_MATHFN (BUILT_IN_LOG1P)
1706 CASE_MATHFN (BUILT_IN_LOG2)
1707 CASE_MATHFN (BUILT_IN_LOGB)
1708 CASE_MATHFN (BUILT_IN_LRINT)
1709 CASE_MATHFN (BUILT_IN_LROUND)
1710 CASE_MATHFN (BUILT_IN_MODF)
1711 CASE_MATHFN (BUILT_IN_NAN)
1712 CASE_MATHFN (BUILT_IN_NANS)
1713 CASE_MATHFN (BUILT_IN_NEARBYINT)
1714 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1715 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1716 CASE_MATHFN (BUILT_IN_POW)
1717 CASE_MATHFN (BUILT_IN_POWI)
1718 CASE_MATHFN (BUILT_IN_POW10)
1719 CASE_MATHFN (BUILT_IN_REMAINDER)
1720 CASE_MATHFN (BUILT_IN_REMQUO)
1721 CASE_MATHFN (BUILT_IN_RINT)
1722 CASE_MATHFN (BUILT_IN_ROUND)
1723 CASE_MATHFN (BUILT_IN_SCALB)
1724 CASE_MATHFN (BUILT_IN_SCALBLN)
1725 CASE_MATHFN (BUILT_IN_SCALBN)
1726 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1727 CASE_MATHFN (BUILT_IN_SIN)
1728 CASE_MATHFN (BUILT_IN_SINCOS)
1729 CASE_MATHFN (BUILT_IN_SINH)
1730 CASE_MATHFN (BUILT_IN_SQRT)
1731 CASE_MATHFN (BUILT_IN_TAN)
1732 CASE_MATHFN (BUILT_IN_TANH)
1733 CASE_MATHFN (BUILT_IN_TGAMMA)
1734 CASE_MATHFN (BUILT_IN_TRUNC)
1735 CASE_MATHFN (BUILT_IN_Y0)
1736 CASE_MATHFN (BUILT_IN_Y1)
1737 CASE_MATHFN (BUILT_IN_YN)
1739 default:
1740 return NULL_TREE;
1743 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1744 return implicit_built_in_decls[fcode];
1745 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1746 return implicit_built_in_decls[fcodef];
1747 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1748 return implicit_built_in_decls[fcodel];
1749 else
1750 return NULL_TREE;
1753 /* If errno must be maintained, expand the RTL to check if the result,
1754 TARGET, of a built-in function call, EXP, is NaN, and if so set
1755 errno to EDOM. */
1757 static void
1758 expand_errno_check (tree exp, rtx target)
1760 rtx lab = gen_label_rtx ();
1762 /* Test the result; if it is NaN, set errno=EDOM because
1763 the argument was not in the domain. */
1764 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1765 0, lab);
1767 #ifdef TARGET_EDOM
1768 /* If this built-in doesn't throw an exception, set errno directly. */
1769 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1771 #ifdef GEN_ERRNO_RTX
1772 rtx errno_rtx = GEN_ERRNO_RTX;
1773 #else
1774 rtx errno_rtx
1775 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1776 #endif
1777 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1778 emit_label (lab);
1779 return;
1781 #endif
1783 /* We can't set errno=EDOM directly; let the library call do it.
1784 Pop the arguments right away in case the call gets deleted. */
1785 NO_DEFER_POP;
1786 expand_call (exp, target, 0);
1787 OK_DEFER_POP;
1788 emit_label (lab);
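/* Illustrative sketch, not part of the GCC sources: what the RTL emitted by
   expand_errno_check amounts to at the source level on a target that defines
   TARGET_EDOM.  Kept under "#if 0"; the function name is a hypothetical
   example.  */
#if 0
#include <errno.h>
#include <math.h>

static double
example_sqrt_with_errno_check (double x)
{
  double result = sqrt (x);	/* the inline-expanded builtin */

  /* A value that compares unequal to itself is a NaN, meaning the
     argument was not in the domain, so set errno to EDOM.  */
  if (result != result)
    errno = EDOM;
  return result;
}
#endif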
1791 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1792 Return NULL_RTX if a normal call should be emitted rather than expanding
1793 the function in-line. EXP is the expression that is a call to the builtin
1794 function; if convenient, the result should be placed in TARGET.
1795 SUBTARGET may be used as the target for computing one of EXP's operands. */
1797 static rtx
1798 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1800 optab builtin_optab;
1801 rtx op0, insns, before_call;
1802 tree fndecl = get_callee_fndecl (exp);
1803 enum machine_mode mode;
1804 bool errno_set = false;
1805 tree arg, narg;
1807 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1808 return NULL_RTX;
1810 arg = CALL_EXPR_ARG (exp, 0);
1812 switch (DECL_FUNCTION_CODE (fndecl))
1814 CASE_FLT_FN (BUILT_IN_SQRT):
1815 errno_set = ! tree_expr_nonnegative_p (arg);
1816 builtin_optab = sqrt_optab;
1817 break;
1818 CASE_FLT_FN (BUILT_IN_EXP):
1819 errno_set = true; builtin_optab = exp_optab; break;
1820 CASE_FLT_FN (BUILT_IN_EXP10):
1821 CASE_FLT_FN (BUILT_IN_POW10):
1822 errno_set = true; builtin_optab = exp10_optab; break;
1823 CASE_FLT_FN (BUILT_IN_EXP2):
1824 errno_set = true; builtin_optab = exp2_optab; break;
1825 CASE_FLT_FN (BUILT_IN_EXPM1):
1826 errno_set = true; builtin_optab = expm1_optab; break;
1827 CASE_FLT_FN (BUILT_IN_LOGB):
1828 errno_set = true; builtin_optab = logb_optab; break;
1829 CASE_FLT_FN (BUILT_IN_LOG):
1830 errno_set = true; builtin_optab = log_optab; break;
1831 CASE_FLT_FN (BUILT_IN_LOG10):
1832 errno_set = true; builtin_optab = log10_optab; break;
1833 CASE_FLT_FN (BUILT_IN_LOG2):
1834 errno_set = true; builtin_optab = log2_optab; break;
1835 CASE_FLT_FN (BUILT_IN_LOG1P):
1836 errno_set = true; builtin_optab = log1p_optab; break;
1837 CASE_FLT_FN (BUILT_IN_ASIN):
1838 builtin_optab = asin_optab; break;
1839 CASE_FLT_FN (BUILT_IN_ACOS):
1840 builtin_optab = acos_optab; break;
1841 CASE_FLT_FN (BUILT_IN_TAN):
1842 builtin_optab = tan_optab; break;
1843 CASE_FLT_FN (BUILT_IN_ATAN):
1844 builtin_optab = atan_optab; break;
1845 CASE_FLT_FN (BUILT_IN_FLOOR):
1846 builtin_optab = floor_optab; break;
1847 CASE_FLT_FN (BUILT_IN_CEIL):
1848 builtin_optab = ceil_optab; break;
1849 CASE_FLT_FN (BUILT_IN_TRUNC):
1850 builtin_optab = btrunc_optab; break;
1851 CASE_FLT_FN (BUILT_IN_ROUND):
1852 builtin_optab = round_optab; break;
1853 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1854 builtin_optab = nearbyint_optab;
1855 if (flag_trapping_math)
1856 break;
1857 /* Else fall through and expand as rint. */
1858 CASE_FLT_FN (BUILT_IN_RINT):
1859 builtin_optab = rint_optab; break;
1860 default:
1861 gcc_unreachable ();
1864 /* Make a suitable register to place result in. */
1865 mode = TYPE_MODE (TREE_TYPE (exp));
1867 if (! flag_errno_math || ! HONOR_NANS (mode))
1868 errno_set = false;
1870 /* Before working hard, check whether the instruction is available. */
1871 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1873 target = gen_reg_rtx (mode);
1875 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1876 need to expand the argument again. This way, we will not perform
1877 side-effects more than once. */
1878 narg = builtin_save_expr (arg);
1879 if (narg != arg)
1881 arg = narg;
1882 exp = build_call_expr (fndecl, 1, arg);
1885 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
1887 start_sequence ();
1889 /* Compute into TARGET.
1890 Set TARGET to wherever the result comes back. */
1891 target = expand_unop (mode, builtin_optab, op0, target, 0);
1893 if (target != 0)
1895 if (errno_set)
1896 expand_errno_check (exp, target);
1898 /* Output the entire sequence. */
1899 insns = get_insns ();
1900 end_sequence ();
1901 emit_insn (insns);
1902 return target;
1905 /* If we were unable to expand via the builtin, stop the sequence
1906 (without outputting the insns) and call the library function
1907 with the stabilized argument list. */
1908 end_sequence ();
1911 before_call = get_last_insn ();
1913 target = expand_call (exp, target, target == const0_rtx);
1915 /* If this is a sqrt operation and we don't care about errno, try to
1916 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1917 This allows the semantics of the libcall to be visible to the RTL
1918 optimizers. */
1919 if (builtin_optab == sqrt_optab && !errno_set)
1921 /* Search backwards through the insns emitted by expand_call looking
1922 for the instruction with the REG_RETVAL note. */
1923 rtx last = get_last_insn ();
1924 while (last != before_call)
1926 if (find_reg_note (last, REG_RETVAL, NULL))
1928 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1929 /* Check that the REG_EQUAL note is an EXPR_LIST with
1930 two elements, i.e. symbol_ref(sqrt) and the operand. */
1931 if (note
1932 && GET_CODE (note) == EXPR_LIST
1933 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1934 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1935 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1937 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1938 /* Check operand is a register with expected mode. */
1939 if (operand
1940 && REG_P (operand)
1941 && GET_MODE (operand) == mode)
1943 /* Replace the REG_EQUAL note with a SQRT rtx. */
1944 rtx equiv = gen_rtx_SQRT (mode, operand);
1945 set_unique_reg_note (last, REG_EQUAL, equiv);
1948 break;
1950 last = PREV_INSN (last);
1954 return target;
1957 /* Expand a call to the builtin binary math functions (pow and atan2).
1958 Return NULL_RTX if a normal call should be emitted rather than expanding the
1959 function in-line. EXP is the expression that is a call to the builtin
1960 function; if convenient, the result should be placed in TARGET.
1961 SUBTARGET may be used as the target for computing one of EXP's
1962 operands. */
1964 static rtx
1965 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1967 optab builtin_optab;
1968 rtx op0, op1, insns;
1969 int op1_type = REAL_TYPE;
1970 tree fndecl = get_callee_fndecl (exp);
1971 tree arg0, arg1, narg;
1972 enum machine_mode mode;
1973 bool errno_set = true;
1974 bool stable = true;
1976 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXP
1977 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXPF
1978 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXPL)
1979 op1_type = INTEGER_TYPE;
1981 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
1982 return NULL_RTX;
1984 arg0 = CALL_EXPR_ARG (exp, 0);
1985 arg1 = CALL_EXPR_ARG (exp, 1);
1987 switch (DECL_FUNCTION_CODE (fndecl))
1989 CASE_FLT_FN (BUILT_IN_POW):
1990 builtin_optab = pow_optab; break;
1991 CASE_FLT_FN (BUILT_IN_ATAN2):
1992 builtin_optab = atan2_optab; break;
1993 CASE_FLT_FN (BUILT_IN_LDEXP):
1994 builtin_optab = ldexp_optab; break;
1995 CASE_FLT_FN (BUILT_IN_FMOD):
1996 builtin_optab = fmod_optab; break;
1997 CASE_FLT_FN (BUILT_IN_REMAINDER):
1998 CASE_FLT_FN (BUILT_IN_DREM):
1999 builtin_optab = remainder_optab; break;
2000 default:
2001 gcc_unreachable ();
2004 /* Make a suitable register to place result in. */
2005 mode = TYPE_MODE (TREE_TYPE (exp));
2007 /* Before working hard, check whether the instruction is available. */
2008 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2009 return NULL_RTX;
2011 target = gen_reg_rtx (mode);
2013 if (! flag_errno_math || ! HONOR_NANS (mode))
2014 errno_set = false;
2016 /* Always stabilize the argument list. */
2017 narg = builtin_save_expr (arg1);
2018 if (narg != arg1)
2020 arg1 = narg;
2021 stable = false;
2023 narg = builtin_save_expr (arg0);
2024 if (narg != arg0)
2026 arg0 = narg;
2027 stable = false;
2030 if (! stable)
2031 exp = build_call_expr (fndecl, 2, arg0, arg1);
2033 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2034 op1 = expand_normal (arg1);
2036 start_sequence ();
2038 /* Compute into TARGET.
2039 Set TARGET to wherever the result comes back. */
2040 target = expand_binop (mode, builtin_optab, op0, op1,
2041 target, 0, OPTAB_DIRECT);
2043 /* If we were unable to expand via the builtin, stop the sequence
2044 (without outputting the insns) and call the library function
2045 with the stabilized argument list. */
2046 if (target == 0)
2048 end_sequence ();
2049 return expand_call (exp, target, target == const0_rtx);
2052 if (errno_set)
2053 expand_errno_check (exp, target);
2055 /* Output the entire sequence. */
2056 insns = get_insns ();
2057 end_sequence ();
2058 emit_insn (insns);
2060 return target;
2063 /* Expand a call to the builtin sin and cos math functions.
2064 Return NULL_RTX if a normal call should be emitted rather than expanding the
2065 function in-line. EXP is the expression that is a call to the builtin
2066 function; if convenient, the result should be placed in TARGET.
2067 SUBTARGET may be used as the target for computing one of EXP's
2068 operands. */
2070 static rtx
2071 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2073 optab builtin_optab;
2074 rtx op0, insns;
2075 tree fndecl = get_callee_fndecl (exp);
2076 enum machine_mode mode;
2077 tree arg, narg;
2079 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2080 return NULL_RTX;
2082 arg = CALL_EXPR_ARG (exp, 0);
2084 switch (DECL_FUNCTION_CODE (fndecl))
2086 CASE_FLT_FN (BUILT_IN_SIN):
2087 CASE_FLT_FN (BUILT_IN_COS):
2088 builtin_optab = sincos_optab; break;
2089 default:
2090 gcc_unreachable ();
2093 /* Make a suitable register to place result in. */
2094 mode = TYPE_MODE (TREE_TYPE (exp));
2096 /* Check if the sincos insn is available; otherwise fall back
2097 to the sin or cos insn. */
2098 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2099 switch (DECL_FUNCTION_CODE (fndecl))
2101 CASE_FLT_FN (BUILT_IN_SIN):
2102 builtin_optab = sin_optab; break;
2103 CASE_FLT_FN (BUILT_IN_COS):
2104 builtin_optab = cos_optab; break;
2105 default:
2106 gcc_unreachable ();
2109 /* Before working hard, check whether the instruction is available. */
2110 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2112 target = gen_reg_rtx (mode);
2114 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2115 need to expand the argument again. This way, we will not perform
2116 side-effects more than once. */
2117 narg = save_expr (arg);
2118 if (narg != arg)
2120 arg = narg;
2121 exp = build_call_expr (fndecl, 1, arg);
2124 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2126 start_sequence ();
2128 /* Compute into TARGET.
2129 Set TARGET to wherever the result comes back. */
2130 if (builtin_optab == sincos_optab)
2132 int result;
2134 switch (DECL_FUNCTION_CODE (fndecl))
2136 CASE_FLT_FN (BUILT_IN_SIN):
2137 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2138 break;
2139 CASE_FLT_FN (BUILT_IN_COS):
2140 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2141 break;
2142 default:
2143 gcc_unreachable ();
2145 gcc_assert (result);
2147 else
2149 target = expand_unop (mode, builtin_optab, op0, target, 0);
2152 if (target != 0)
2154 /* Output the entire sequence. */
2155 insns = get_insns ();
2156 end_sequence ();
2157 emit_insn (insns);
2158 return target;
2161 /* If we were unable to expand via the builtin, stop the sequence
2162 (without outputting the insns) and call the library function
2163 with the stabilized argument list. */
2164 end_sequence ();
2167 target = expand_call (exp, target, target == const0_rtx);
2169 return target;
2172 /* Expand a call to one of the builtin math functions that operate on
2173 floating point argument and output an integer result (ilogb, isinf,
2174 isnan, etc).
2175 Return 0 if a normal call should be emitted rather than expanding the
2176 function in-line. EXP is the expression that is a call to the builtin
2177 function; if convenient, the result should be placed in TARGET.
2178 SUBTARGET may be used as the target for computing one of EXP's operands. */
2180 static rtx
2181 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2183 optab builtin_optab;
2184 enum insn_code icode;
2185 rtx op0;
2186 tree fndecl = get_callee_fndecl (exp);
2187 enum machine_mode mode;
2188 bool errno_set = false;
2189 tree arg, narg;
2191 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2192 return NULL_RTX;
2194 arg = CALL_EXPR_ARG (exp, 0);
2196 switch (DECL_FUNCTION_CODE (fndecl))
2198 CASE_FLT_FN (BUILT_IN_ILOGB):
2199 errno_set = true; builtin_optab = ilogb_optab; break;
2200 CASE_FLT_FN (BUILT_IN_ISINF):
2201 builtin_optab = isinf_optab; break;
2202 default:
2203 gcc_unreachable ();
2206 /* There's no easy way to detect the case where we need to set EDOM. */
2207 if (flag_errno_math && errno_set)
2208 return NULL_RTX;
2210 /* Optab mode depends on the mode of the input argument. */
2211 mode = TYPE_MODE (TREE_TYPE (arg));
2213 icode = builtin_optab->handlers[(int) mode].insn_code;
2215 /* Before working hard, check whether the instruction is available. */
2216 if (icode != CODE_FOR_nothing)
2218 /* Make a suitable register to place result in. */
2219 if (!target
2220 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2221 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2223 gcc_assert (insn_data[icode].operand[0].predicate
2224 (target, GET_MODE (target)));
2226 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2227 need to expand the argument again. This way, we will not perform
2228 side-effects more than once. */
2229 narg = builtin_save_expr (arg);
2230 if (narg != arg)
2232 arg = narg;
2233 exp = build_call_expr (fndecl, 1, arg);
2236 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2238 if (mode != GET_MODE (op0))
2239 op0 = convert_to_mode (mode, op0, 0);
2241 /* Compute into TARGET.
2242 Set TARGET to wherever the result comes back. */
2243 emit_unop_insn (icode, target, op0, UNKNOWN);
2244 return target;
2247 target = expand_call (exp, target, target == const0_rtx);
2249 return target;
2252 /* Expand a call to the builtin sincos math function.
2253 Return NULL_RTX if a normal call should be emitted rather than expanding the
2254 function in-line. EXP is the expression that is a call to the builtin
2255 function. */
2257 static rtx
2258 expand_builtin_sincos (tree exp)
2260 rtx op0, op1, op2, target1, target2;
2261 enum machine_mode mode;
2262 tree arg, sinp, cosp;
2263 int result;
2265 if (!validate_arglist (exp, REAL_TYPE,
2266 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2267 return NULL_RTX;
2269 arg = CALL_EXPR_ARG (exp, 0);
2270 sinp = CALL_EXPR_ARG (exp, 1);
2271 cosp = CALL_EXPR_ARG (exp, 2);
2273 /* Make a suitable register to place result in. */
2274 mode = TYPE_MODE (TREE_TYPE (arg));
2276 /* Check if sincos insn is available, otherwise emit the call. */
2277 if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2278 return NULL_RTX;
2280 target1 = gen_reg_rtx (mode);
2281 target2 = gen_reg_rtx (mode);
2283 op0 = expand_normal (arg);
2284 op1 = expand_normal (build_fold_indirect_ref (sinp));
2285 op2 = expand_normal (build_fold_indirect_ref (cosp));
2287 /* Compute into target1 and target2.
2288 Set TARGET to wherever the result comes back. */
2289 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2290 gcc_assert (result);
2292 /* Move target1 and target2 to the memory locations indicated
2293 by op1 and op2. */
2294 emit_move_insn (op1, target1);
2295 emit_move_insn (op2, target2);
2297 return const0_rtx;
2300 /* Expand a call to the internal cexpi builtin to the sincos math function.
2301 EXP is the expression that is a call to the builtin function; if convenient,
2302 the result should be placed in TARGET. SUBTARGET may be used as the target
2303 for computing one of EXP's operands. */
2305 static rtx
2306 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2308 tree fndecl = get_callee_fndecl (exp);
2309 tree arg, type;
2310 enum machine_mode mode;
2311 rtx op0, op1, op2;
2313 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2314 return NULL_RTX;
2316 arg = CALL_EXPR_ARG (exp, 0);
2317 type = TREE_TYPE (arg);
2318 mode = TYPE_MODE (TREE_TYPE (arg));
2320 /* Try expanding via a sincos optab, falling back to emitting a libcall
2321 to sincos or cexp. We are sure that sincos or cexp exists, because cexpi
2322 is only generated from sincos or cexp, or when either of them is available. */
2323 if (sincos_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2325 op1 = gen_reg_rtx (mode);
2326 op2 = gen_reg_rtx (mode);
2328 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2330 /* Compute into op1 and op2. */
2331 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2333 else if (TARGET_HAS_SINCOS)
2335 tree call, fn = NULL_TREE;
2336 tree top1, top2;
2337 rtx op1a, op2a;
2339 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2340 fn = built_in_decls[BUILT_IN_SINCOSF];
2341 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2342 fn = built_in_decls[BUILT_IN_SINCOS];
2343 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2344 fn = built_in_decls[BUILT_IN_SINCOSL];
2345 else
2346 gcc_unreachable ();
2348 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2349 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2350 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2351 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2352 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2353 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2355 /* Make sure not to fold the sincos call again. */
2356 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2357 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2358 call, 3, arg, top1, top2));
2360 else
2362 tree call, fn = NULL_TREE, narg;
2363 tree ctype = build_complex_type (type);
2365 /* We can expand via the C99 cexp function. */
2366 gcc_assert (TARGET_C99_FUNCTIONS);
2368 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2369 fn = built_in_decls[BUILT_IN_CEXPF];
2370 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2371 fn = built_in_decls[BUILT_IN_CEXP];
2372 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2373 fn = built_in_decls[BUILT_IN_CEXPL];
2374 else
2375 gcc_unreachable ();
2376 narg = fold_build2 (COMPLEX_EXPR, ctype,
2377 build_real (type, dconst0), arg);
2379 /* Make sure not to fold the cexp call again. */
2380 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2381 return expand_expr (build_call_nary (ctype, call, 1, arg),
2382 target, VOIDmode, 0);
2385 /* Now build the proper return type. */
2386 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2387 make_tree (TREE_TYPE (arg), op2),
2388 make_tree (TREE_TYPE (arg), op1)),
2389 target, VOIDmode, 0);
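/* Illustrative sketch, not part of the GCC sources: the sincos-based lowering
   of __builtin_cexpi above, written at the source level.  Kept under "#if 0";
   the function name is a hypothetical example and sincos is the GNU libm
   extension assumed by the TARGET_HAS_SINCOS path.  */
#if 0
#include <complex.h>
#include <math.h>

static double _Complex
example_cexpi (double x)
{
  double s, c;

  /* cexpi (x) == cos (x) + i * sin (x); a single sincos call computes both
     parts, and the complex result is assembled from them, just as
     expand_builtin_cexpi rebuilds a COMPLEX_EXPR from op1 and op2.  */
  sincos (x, &s, &c);
  return c + s * I;
}
#endif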
2392 /* Expand a call to one of the builtin rounding functions gcc defines
2393 as an extension (lfloor and lceil). As these are gcc extensions we
2394 do not need to worry about setting errno to EDOM.
2395 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2396 EXP is the expression that is a call to the builtin function;
2397 if convenient, the result should be placed in TARGET. SUBTARGET may
2398 be used as the target for computing one of EXP's operands. */
2400 static rtx
2401 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2403 convert_optab builtin_optab;
2404 rtx op0, insns, tmp;
2405 tree fndecl = get_callee_fndecl (exp);
2406 enum built_in_function fallback_fn;
2407 tree fallback_fndecl;
2408 enum machine_mode mode;
2409 tree arg, narg;
2411 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2412 gcc_unreachable ();
2414 arg = CALL_EXPR_ARG (exp, 0);
2416 switch (DECL_FUNCTION_CODE (fndecl))
2418 CASE_FLT_FN (BUILT_IN_LCEIL):
2419 CASE_FLT_FN (BUILT_IN_LLCEIL):
2420 builtin_optab = lceil_optab;
2421 fallback_fn = BUILT_IN_CEIL;
2422 break;
2424 CASE_FLT_FN (BUILT_IN_LFLOOR):
2425 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2426 builtin_optab = lfloor_optab;
2427 fallback_fn = BUILT_IN_FLOOR;
2428 break;
2430 default:
2431 gcc_unreachable ();
2434 /* Make a suitable register to place result in. */
2435 mode = TYPE_MODE (TREE_TYPE (exp));
2437 target = gen_reg_rtx (mode);
2439 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2440 need to expand the argument again. This way, we will not perform
2441 side-effects more than once. */
2442 narg = builtin_save_expr (arg);
2443 if (narg != arg)
2445 arg = narg;
2446 exp = build_call_expr (fndecl, 1, arg);
2449 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2451 start_sequence ();
2453 /* Compute into TARGET. */
2454 if (expand_sfix_optab (target, op0, builtin_optab))
2456 /* Output the entire sequence. */
2457 insns = get_insns ();
2458 end_sequence ();
2459 emit_insn (insns);
2460 return target;
2463 /* If we were unable to expand via the builtin, stop the sequence
2464 (without outputting the insns). */
2465 end_sequence ();
2467 /* Fall back to floating point rounding optab. */
2468 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2469 /* We shouldn't get here on targets without TARGET_C99_FUNCTIONS.
2470 ??? Perhaps convert (int)floorf(x) into (int)floor((double)x). */
2471 gcc_assert (fallback_fndecl != NULL_TREE);
2472 exp = build_call_expr (fallback_fndecl, 1, arg);
2474 tmp = expand_normal (exp);
2476 /* Truncate the result of the floating point optab to an integer
2477 via expand_fix (). */
2478 target = gen_reg_rtx (mode);
2479 expand_fix (target, tmp, 0);
2481 return target;
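/* Illustrative sketch, not part of the GCC sources: the fallback path of
   expand_builtin_int_roundingfn at the source level.  When no lfloor/lceil
   pattern is available, the call is lowered to the plain rounding function
   followed by a float-to-integer conversion, which is what the expand_fix
   call above performs.  Kept under "#if 0"; the function name is a
   hypothetical example.  */
#if 0
#include <math.h>

static long
example_lfloor_fallback (double x)
{
  /* __builtin_lfloor (x) becomes (long) floor (x).  */
  return (long) floor (x);
}
#endif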
2484 /* Expand a call to one of the builtin math functions doing integer
2485 conversion (lrint).
2486 Return 0 if a normal call should be emitted rather than expanding the
2487 function in-line. EXP is the expression that is a call to the builtin
2488 function; if convenient, the result should be placed in TARGET.
2489 SUBTARGET may be used as the target for computing one of EXP's operands. */
2491 static rtx
2492 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2494 convert_optab builtin_optab;
2495 rtx op0, insns;
2496 tree fndecl = get_callee_fndecl (exp);
2497 tree arg, narg;
2498 enum machine_mode mode;
2500 /* There's no easy way to detect the case where we need to set EDOM. */
2501 if (flag_errno_math)
2502 return NULL_RTX;
2504 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2505 gcc_unreachable ();
2507 arg = CALL_EXPR_ARG (exp, 0);
2509 switch (DECL_FUNCTION_CODE (fndecl))
2511 CASE_FLT_FN (BUILT_IN_LRINT):
2512 CASE_FLT_FN (BUILT_IN_LLRINT):
2513 builtin_optab = lrint_optab; break;
2514 CASE_FLT_FN (BUILT_IN_LROUND):
2515 CASE_FLT_FN (BUILT_IN_LLROUND):
2516 builtin_optab = lround_optab; break;
2517 default:
2518 gcc_unreachable ();
2521 /* Make a suitable register to place result in. */
2522 mode = TYPE_MODE (TREE_TYPE (exp));
2524 target = gen_reg_rtx (mode);
2526 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2527 need to expand the argument again. This way, we will not perform
2528 side-effects more than once. */
2529 narg = builtin_save_expr (arg);
2530 if (narg != arg)
2532 arg = narg;
2533 exp = build_call_expr (fndecl, 1, arg);
2536 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2538 start_sequence ();
2540 if (expand_sfix_optab (target, op0, builtin_optab))
2542 /* Output the entire sequence. */
2543 insns = get_insns ();
2544 end_sequence ();
2545 emit_insn (insns);
2546 return target;
2549 /* If we were unable to expand via the builtin, stop the sequence
2550 (without outputting the insns) and call the library function
2551 with the stabilized argument list. */
2552 end_sequence ();
2554 target = expand_call (exp, target, target == const0_rtx);
2556 return target;
2559 /* To evaluate powi(x,n), the floating point value x raised to the
2560 constant integer exponent n, we use a hybrid algorithm that
2561 combines the "window method" with look-up tables. For an
2562 introduction to exponentiation algorithms and "addition chains",
2563 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2564 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2565 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2566 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2568 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2569 multiplications to inline before calling the system library's pow
2570 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2571 so this default never requires calling pow, powf or powl. */
2573 #ifndef POWI_MAX_MULTS
2574 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2575 #endif
2577 /* The size of the "optimal power tree" lookup table. All
2578 exponents less than this value are simply looked up in the
2579 powi_table below. This threshold is also used to size the
2580 cache of pseudo registers that hold intermediate results. */
2581 #define POWI_TABLE_SIZE 256
2583 /* The size, in bits, of the window used in the "window method"
2584 exponentiation algorithm. This is equivalent to a radix of
2585 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2586 #define POWI_WINDOW_SIZE 3
2588 /* The following table is an efficient representation of an
2589 "optimal power tree". For each value, i, the corresponding
2590 value, j, in the table states that an optimal evaluation
2591 sequence for calculating pow(x,i) can be found by evaluating
2592 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2593 100 integers is given in Knuth's "Seminumerical algorithms". */
2595 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2597 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2598 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2599 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2600 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2601 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2602 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2603 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2604 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2605 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2606 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2607 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2608 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2609 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2610 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2611 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2612 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2613 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2614 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2615 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2616 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2617 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2618 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2619 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2620 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2621 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2622 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2623 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2624 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2625 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2626 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2627 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2628 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
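/* Illustrative sketch, not part of the GCC sources: the same window-method /
   look-up-table recursion that expand_powi_1 and expand_powi perform on RTL
   below, evaluated directly on doubles so the algorithm can be read without
   the RTL plumbing.  It reuses powi_table, POWI_TABLE_SIZE and
   POWI_WINDOW_SIZE from above.  Kept under "#if 0"; example_powi_1 and
   example_powi are hypothetical names.  */
#if 0
static double
example_powi_1 (double x, unsigned HOST_WIDE_INT n, double *cache,
		bool *cached)
{
  double result;

  if (n < POWI_TABLE_SIZE)
    {
      if (cached[n])
	return cache[n];

      /* Split according to the "optimal power tree":
	 x**n == x**powi_table[n] * x**(n - powi_table[n]).  */
      result = example_powi_1 (x, n - powi_table[n], cache, cached)
	       * example_powi_1 (x, powi_table[n], cache, cached);
      cache[n] = result;
      cached[n] = true;
      return result;
    }
  else if (n & 1)
    {
      /* Window method: peel off the low POWI_WINDOW_SIZE bits.  */
      unsigned HOST_WIDE_INT digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      return example_powi_1 (x, n - digit, cache, cached)
	     * example_powi_1 (x, digit, cache, cached);
    }
  else
    {
      /* Even exponent: square the half power.  */
      double half = example_powi_1 (x, n >> 1, cache, cached);
      return half * half;
    }
}

static double
example_powi (double x, HOST_WIDE_INT n)
{
  double cache[POWI_TABLE_SIZE];
  bool cached[POWI_TABLE_SIZE];
  double result;

  if (n == 0)
    return 1.0;

  memset (cached, 0, sizeof (cached));
  cache[1] = x;
  cached[1] = true;

  result = example_powi_1 (x, (n < 0) ? -n : n, cache, cached);

  /* As in expand_powi, a negative exponent reciprocates the result.  */
  return (n < 0) ? 1.0 / result : result;
}
#endif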
2632 /* Return the number of multiplications required to calculate
2633 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2634 subroutine of powi_cost. CACHE is an array indicating
2635 which exponents have already been calculated. */
2637 static int
2638 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2640 /* If we've already calculated this exponent, then this evaluation
2641 doesn't require any additional multiplications. */
2642 if (cache[n])
2643 return 0;
2645 cache[n] = true;
2646 return powi_lookup_cost (n - powi_table[n], cache)
2647 + powi_lookup_cost (powi_table[n], cache) + 1;
2650 /* Return the number of multiplications required to calculate
2651 powi(x,n) for an arbitrary x, given the exponent N. This
2652 function needs to be kept in sync with expand_powi below. */
2654 static int
2655 powi_cost (HOST_WIDE_INT n)
2657 bool cache[POWI_TABLE_SIZE];
2658 unsigned HOST_WIDE_INT digit;
2659 unsigned HOST_WIDE_INT val;
2660 int result;
2662 if (n == 0)
2663 return 0;
2665 /* Ignore the reciprocal when calculating the cost. */
2666 val = (n < 0) ? -n : n;
2668 /* Initialize the exponent cache. */
2669 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2670 cache[1] = true;
2672 result = 0;
2674 while (val >= POWI_TABLE_SIZE)
2676 if (val & 1)
2678 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2679 result += powi_lookup_cost (digit, cache)
2680 + POWI_WINDOW_SIZE + 1;
2681 val >>= POWI_WINDOW_SIZE;
2683 else
2685 val >>= 1;
2686 result++;
2690 return result + powi_lookup_cost (val, cache);
2693 /* Recursive subroutine of expand_powi. This function takes the array,
2694 CACHE, of already calculated exponents and an exponent N and returns
2695 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2697 static rtx
2698 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2700 unsigned HOST_WIDE_INT digit;
2701 rtx target, result;
2702 rtx op0, op1;
2704 if (n < POWI_TABLE_SIZE)
2706 if (cache[n])
2707 return cache[n];
2709 target = gen_reg_rtx (mode);
2710 cache[n] = target;
2712 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2713 op1 = expand_powi_1 (mode, powi_table[n], cache);
2715 else if (n & 1)
2717 target = gen_reg_rtx (mode);
2718 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2719 op0 = expand_powi_1 (mode, n - digit, cache);
2720 op1 = expand_powi_1 (mode, digit, cache);
2722 else
2724 target = gen_reg_rtx (mode);
2725 op0 = expand_powi_1 (mode, n >> 1, cache);
2726 op1 = op0;
2729 result = expand_mult (mode, op0, op1, target, 0);
2730 if (result != target)
2731 emit_move_insn (target, result);
2732 return target;
2735 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2736 floating point operand in mode MODE, and N is the exponent. This
2737 function needs to be kept in sync with powi_cost above. */
2739 static rtx
2740 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2742 unsigned HOST_WIDE_INT val;
2743 rtx cache[POWI_TABLE_SIZE];
2744 rtx result;
2746 if (n == 0)
2747 return CONST1_RTX (mode);
2749 val = (n < 0) ? -n : n;
2751 memset (cache, 0, sizeof (cache));
2752 cache[1] = x;
2754 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2756 /* If the original exponent was negative, reciprocate the result. */
2757 if (n < 0)
2758 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2759 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2761 return result;
2764 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2765 a normal call should be emitted rather than expanding the function
2766 in-line. EXP is the expression that is a call to the builtin
2767 function; if convenient, the result should be placed in TARGET. */
2769 static rtx
2770 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2772 tree arg0, arg1;
2773 tree fn, narg0;
2774 tree type = TREE_TYPE (exp);
2775 REAL_VALUE_TYPE cint, c, c2;
2776 HOST_WIDE_INT n;
2777 rtx op, op2;
2778 enum machine_mode mode = TYPE_MODE (type);
2780 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2781 return NULL_RTX;
2783 arg0 = CALL_EXPR_ARG (exp, 0);
2784 arg1 = CALL_EXPR_ARG (exp, 1);
2786 if (TREE_CODE (arg1) != REAL_CST
2787 || TREE_OVERFLOW (arg1))
2788 return expand_builtin_mathfn_2 (exp, target, subtarget);
2790 /* Handle constant exponents. */
2792 /* For integer valued exponents we can expand to an optimal multiplication
2793 sequence using expand_powi. */
2794 c = TREE_REAL_CST (arg1);
2795 n = real_to_integer (&c);
2796 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2797 if (real_identical (&c, &cint)
2798 && ((n >= -1 && n <= 2)
2799 || (flag_unsafe_math_optimizations
2800 && !optimize_size
2801 && powi_cost (n) <= POWI_MAX_MULTS)))
2803 op = expand_expr (arg0, subtarget, VOIDmode, 0);
2804 if (n != 1)
2806 op = force_reg (mode, op);
2807 op = expand_powi (op, mode, n);
2809 return op;
2812 narg0 = builtin_save_expr (arg0);
2814 /* If the exponent is not integer valued, check if it is half of an integer.
2815 In this case we can expand to sqrt (x) * x**(n/2). */
2816 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2817 if (fn != NULL_TREE)
2819 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2820 n = real_to_integer (&c2);
2821 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2822 if (real_identical (&c2, &cint)
2823 && ((flag_unsafe_math_optimizations
2824 && !optimize_size
2825 && powi_cost (n/2) <= POWI_MAX_MULTS)
2826 || n == 1))
2828 tree call_expr = build_call_expr (fn, 1, narg0);
2829 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2830 if (n != 1)
2832 op2 = expand_expr (narg0, subtarget, VOIDmode, 0);
2833 op2 = force_reg (mode, op2);
2834 op2 = expand_powi (op2, mode, abs (n / 2));
2835 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2836 0, OPTAB_LIB_WIDEN);
2837 /* If the original exponent was negative, reciprocate the
2838 result. */
2839 if (n < 0)
2840 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2841 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2843 return op;
2847 /* Check whether the exponent is a third of an integer. In this case
2848 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2849 different from pow (x, 1./3.) due to rounding and behavior
2850 with negative x, we need to constrain this transformation to
2851 unsafe math and positive x or finite math. */
2852 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2853 if (fn != NULL_TREE
2854 && flag_unsafe_math_optimizations
2855 && (tree_expr_nonnegative_p (arg0)
2856 || !HONOR_NANS (mode)))
2858 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2859 real_round (&c2, mode, &c2);
2860 n = real_to_integer (&c2);
2861 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2862 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2863 real_convert (&c2, mode, &c2);
2864 if (real_identical (&c2, &c)
2865 && ((!optimize_size
2866 && powi_cost (n/3) <= POWI_MAX_MULTS)
2867 || n == 1))
2869 tree call_expr = build_call_expr (fn, 1, narg0);
2870 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2871 if (abs (n) % 3 == 2)
2872 op = expand_simple_binop (mode, MULT, op, op, op,
2873 0, OPTAB_LIB_WIDEN);
2874 if (n != 1)
2876 op2 = expand_expr (narg0, subtarget, VOIDmode, 0);
2877 op2 = force_reg (mode, op2);
2878 op2 = expand_powi (op2, mode, abs (n / 3));
2879 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2880 0, OPTAB_LIB_WIDEN);
2881 /* If the original exponent was negative, reciprocate the
2882 result. */
2883 if (n < 0)
2884 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2885 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2887 return op;
2891 /* Fall back to optab expansion. */
2892 return expand_builtin_mathfn_2 (exp, target, subtarget);
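/* Illustrative sketch, not part of the GCC sources: the two decompositions
   used above for non-integral constant exponents, written out on doubles.
   Under the conditions checked above (notably -funsafe-math-optimizations and
   a cheap enough powi expansion), pow with a half-integer or
   third-of-an-integer exponent is rewritten in terms of sqrt/cbrt and an
   integer power.  Kept under "#if 0"; the function names are hypothetical
   examples.  */
#if 0
#include <math.h>

static double
example_pow_half_integer (double x)
{
  /* pow (x, 3.5): c * 2 == 7, so x**3.5 == sqrt (x) * x**(7/2)
     == sqrt (x) * x**3 (integer division).  */
  return sqrt (x) * (x * x * x);
}

static double
example_pow_third_integer (double x)
{
  /* pow (x, 4./3.): c * 3 == 4, so x**(4./3.) == x**(4/3) * cbrt (x)**(4%3)
     == x**1 * cbrt (x)**1 (integer division).  */
  return x * cbrt (x);
}
#endif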
2895 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2896 a normal call should be emitted rather than expanding the function
2897 in-line. EXP is the expression that is a call to the builtin
2898 function; if convenient, the result should be placed in TARGET. */
2900 static rtx
2901 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
2903 tree arg0, arg1;
2904 rtx op0, op1;
2905 enum machine_mode mode;
2906 enum machine_mode mode2;
2908 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2909 return NULL_RTX;
2911 arg0 = CALL_EXPR_ARG (exp, 0);
2912 arg1 = CALL_EXPR_ARG (exp, 1);
2913 mode = TYPE_MODE (TREE_TYPE (exp));
2915 /* Handle constant power. */
2917 if (TREE_CODE (arg1) == INTEGER_CST
2918 && !TREE_OVERFLOW (arg1))
2920 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
2922 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2923 Otherwise, check the number of multiplications required. */
2924 if ((TREE_INT_CST_HIGH (arg1) == 0
2925 || TREE_INT_CST_HIGH (arg1) == -1)
2926 && ((n >= -1 && n <= 2)
2927 || (! optimize_size
2928 && powi_cost (n) <= POWI_MAX_MULTS)))
2930 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
2931 op0 = force_reg (mode, op0);
2932 return expand_powi (op0, mode, n);
2936 /* Emit a libcall to libgcc. */
2938 /* Mode of the 2nd argument must match that of an int. */
2939 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2941 if (target == NULL_RTX)
2942 target = gen_reg_rtx (mode);
2944 op0 = expand_expr (arg0, subtarget, mode, 0);
2945 if (GET_MODE (op0) != mode)
2946 op0 = convert_to_mode (mode, op0, 0);
2947 op1 = expand_expr (arg1, 0, mode2, 0);
2948 if (GET_MODE (op1) != mode2)
2949 op1 = convert_to_mode (mode2, op1, 0);
2951 target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
2952 target, LCT_CONST_MAKE_BLOCK, mode, 2,
2953 op0, mode, op1, mode2);
2955 return target;
2958 /* Expand expression EXP, which is a call to the strlen builtin. Return
2959 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2960 try to get the result in TARGET, if convenient. */
2962 static rtx
2963 expand_builtin_strlen (tree exp, rtx target,
2964 enum machine_mode target_mode)
2966 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2967 return NULL_RTX;
2968 else
2970 rtx pat;
2971 tree len;
2972 tree src = CALL_EXPR_ARG (exp, 0);
2973 rtx result, src_reg, char_rtx, before_strlen;
2974 enum machine_mode insn_mode = target_mode, char_mode;
2975 enum insn_code icode = CODE_FOR_nothing;
2976 int align;
2978 /* If the length can be computed at compile-time, return it. */
2979 len = c_strlen (src, 0);
2980 if (len)
2981 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2983 /* If the length can be computed at compile-time and is a constant
2984 integer, but there are side-effects in src, evaluate
2985 src for side-effects, then return len.
2986 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2987 can be optimized into: i++; x = 3; */
2988 len = c_strlen (src, 1);
2989 if (len && TREE_CODE (len) == INTEGER_CST)
2991 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2992 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2995 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2997 /* If SRC is not a pointer type, don't do this operation inline. */
2998 if (align == 0)
2999 return NULL_RTX;
3001 /* Bail out if we can't compute strlen in the right mode. */
3002 while (insn_mode != VOIDmode)
3004 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
3005 if (icode != CODE_FOR_nothing)
3006 break;
3008 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3010 if (insn_mode == VOIDmode)
3011 return NULL_RTX;
3013 /* Make a place to write the result of the instruction. */
3014 result = target;
3015 if (! (result != 0
3016 && REG_P (result)
3017 && GET_MODE (result) == insn_mode
3018 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3019 result = gen_reg_rtx (insn_mode);
3021 /* Make a place to hold the source address. We will not expand
3022 the actual source until we are sure that the expansion will
3023 not fail -- there are trees that cannot be expanded twice. */
3024 src_reg = gen_reg_rtx (Pmode);
3026 /* Mark the beginning of the strlen sequence so we can emit the
3027 source operand later. */
3028 before_strlen = get_last_insn ();
3030 char_rtx = const0_rtx;
3031 char_mode = insn_data[(int) icode].operand[2].mode;
3032 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3033 char_mode))
3034 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3036 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3037 char_rtx, GEN_INT (align));
3038 if (! pat)
3039 return NULL_RTX;
3040 emit_insn (pat);
3042 /* Now that we are assured of success, expand the source. */
3043 start_sequence ();
3044 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3045 if (pat != src_reg)
3046 emit_move_insn (src_reg, pat);
3047 pat = get_insns ();
3048 end_sequence ();
3050 if (before_strlen)
3051 emit_insn_after (pat, before_strlen);
3052 else
3053 emit_insn_before (pat, get_insns ());
3055 /* Return the value in the proper mode for this function. */
3056 if (GET_MODE (result) == target_mode)
3057 target = result;
3058 else if (target != 0)
3059 convert_move (target, result, 0);
3060 else
3061 target = convert_to_mode (target_mode, result, 0);
3063 return target;
3067 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed; the
3068 caller should emit a normal call. Otherwise try to get the result
3069 in TARGET, if convenient (and in mode MODE if that's convenient). */
3071 static rtx
3072 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3074 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3076 tree type = TREE_TYPE (exp);
3077 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3078 CALL_EXPR_ARG (exp, 1), type);
3079 if (result)
3080 return expand_expr (result, target, mode, EXPAND_NORMAL);
3082 return NULL_RTX;
3085 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3086 caller should emit a normal call. Otherwise try to get the result
3087 in TARGET, if convenient (and in mode MODE if that's convenient). */
3089 static rtx
3090 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3092 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3094 tree type = TREE_TYPE (exp);
3095 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3096 CALL_EXPR_ARG (exp, 1), type);
3097 if (result)
3098 return expand_expr (result, target, mode, EXPAND_NORMAL);
3100 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3102 return NULL_RTX;
3105 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3106 caller should emit a normal call. Otherwise try to get the result
3107 in TARGET, if convenient (and in mode MODE if that's convenient). */
3109 static rtx
3110 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3112 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3114 tree type = TREE_TYPE (exp);
3115 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3116 CALL_EXPR_ARG (exp, 1), type);
3117 if (result)
3118 return expand_expr (result, target, mode, EXPAND_NORMAL);
3120 return NULL_RTX;
3123 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3124 caller should emit a normal call. Otherwise try to get the result
3125 in TARGET, if convenient (and in mode MODE if that's convenient). */
3127 static rtx
3128 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3130 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3132 tree type = TREE_TYPE (exp);
3133 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3134 CALL_EXPR_ARG (exp, 1), type);
3135 if (result)
3136 return expand_expr (result, target, mode, EXPAND_NORMAL);
3138 return NULL_RTX;
3141 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3142 bytes from constant string DATA + OFFSET and return it as target
3143 constant. */
3145 static rtx
3146 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3147 enum machine_mode mode)
3149 const char *str = (const char *) data;
3151 gcc_assert (offset >= 0
3152 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3153 <= strlen (str) + 1));
3155 return c_readstr (str + offset, mode);
3158 /* Expand a call EXP to the memcpy builtin.
3159 Return NULL_RTX if we failed, the caller should emit a normal call,
3160 otherwise try to get the result in TARGET, if convenient (and in
3161 mode MODE if that's convenient). */
3163 static rtx
3164 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3166 tree fndecl = get_callee_fndecl (exp);
3168 if (!validate_arglist (exp,
3169 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3170 return NULL_RTX;
3171 else
3173 tree dest = CALL_EXPR_ARG (exp, 0);
3174 tree src = CALL_EXPR_ARG (exp, 1);
3175 tree len = CALL_EXPR_ARG (exp, 2);
3176 const char *src_str;
3177 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3178 unsigned int dest_align
3179 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3180 rtx dest_mem, src_mem, dest_addr, len_rtx;
3181 tree result = fold_builtin_memory_op (dest, src, len,
3182 TREE_TYPE (TREE_TYPE (fndecl)),
3183 false, /*endp=*/0);
3184 HOST_WIDE_INT expected_size = -1;
3185 unsigned int expected_align = 0;
3187 if (result)
3189 while (TREE_CODE (result) == COMPOUND_EXPR)
3191 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3192 EXPAND_NORMAL);
3193 result = TREE_OPERAND (result, 1);
3195 return expand_expr (result, target, mode, EXPAND_NORMAL);
3198 /* If DEST is not a pointer type, call the normal function. */
3199 if (dest_align == 0)
3200 return NULL_RTX;
3202 /* If SRC is not a pointer type, don't do this
3203 operation in-line. */
3204 if (src_align == 0)
3205 return NULL_RTX;
3207 stringop_block_profile (exp, &expected_align, &expected_size);
3208 if (expected_align < dest_align)
3209 expected_align = dest_align;
3210 dest_mem = get_memory_rtx (dest, len);
3211 set_mem_align (dest_mem, dest_align);
3212 len_rtx = expand_normal (len);
3213 src_str = c_getstr (src);
3215 /* If SRC is a string constant and block move would be done
3216 by pieces, we can avoid loading the string from memory
3217 and only store the computed constants. */
3218 if (src_str
3219 && GET_CODE (len_rtx) == CONST_INT
3220 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3221 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3222 (void *) src_str, dest_align))
3224 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3225 builtin_memcpy_read_str,
3226 (void *) src_str, dest_align, 0);
3227 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3228 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3229 return dest_mem;
3232 src_mem = get_memory_rtx (src, len);
3233 set_mem_align (src_mem, src_align);
3235 /* Copy word part most expediently. */
3236 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3237 CALL_EXPR_TAILCALL (exp)
3238 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3239 expected_align, expected_size);
3241 if (dest_addr == 0)
3243 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3244 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3246 return dest_addr;
3250 /* Expand a call EXP to the mempcpy builtin.
3251 Return NULL_RTX if we failed; the caller should emit a normal call.
3252 Otherwise try to get the result in TARGET, if convenient (and in
3253 mode MODE if that's convenient). If ENDP is 0 return the
3254 destination pointer, if ENDP is 1 return the end pointer ala
3255 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3256 stpcpy. */
3258 static rtx
3259 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3261 if (!validate_arglist (exp,
3262 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3263 return NULL_RTX;
3264 else
3266 tree dest = CALL_EXPR_ARG (exp, 0);
3267 tree src = CALL_EXPR_ARG (exp, 1);
3268 tree len = CALL_EXPR_ARG (exp, 2);
3269 return expand_builtin_mempcpy_args (dest, src, len,
3270 TREE_TYPE (exp),
3271 target, mode, /*endp=*/ 1);
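/* Illustrative sketch, not part of the GCC sources: the ENDP return-value
   convention used by the mempcpy/stpcpy expanders, spelled out on plain
   pointers for a copy of n bytes that includes the terminating NUL.  Kept
   under "#if 0"; the function name is a hypothetical example.  */
#if 0
#include <string.h>

static void
example_endp_values (char *dest, const char *src, size_t n, char **out)
{
  memcpy (dest, src, n);
  out[0] = dest;		/* ENDP == 0: the destination pointer.  */
  out[1] = dest + n;		/* ENDP == 1: the end pointer, as mempcpy.  */
  out[2] = dest + n - 1;	/* ENDP == 2: end minus one, as stpcpy
				   (it points at the copied NUL).  */
}
#endif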
3275 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3276 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3277 so that this can also be called without constructing an actual CALL_EXPR.
3278 TYPE is the return type of the call. The other arguments and return value
3279 are the same as for expand_builtin_mempcpy. */
3281 static rtx
3282 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3283 rtx target, enum machine_mode mode, int endp)
3285 /* If return value is ignored, transform mempcpy into memcpy. */
3286 if (target == const0_rtx)
3288 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3290 if (!fn)
3291 return NULL_RTX;
3293 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3294 target, mode, EXPAND_NORMAL);
3296 else
3298 const char *src_str;
3299 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3300 unsigned int dest_align
3301 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3302 rtx dest_mem, src_mem, len_rtx;
3303 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3305 if (result)
3307 while (TREE_CODE (result) == COMPOUND_EXPR)
3309 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3310 EXPAND_NORMAL);
3311 result = TREE_OPERAND (result, 1);
3313 return expand_expr (result, target, mode, EXPAND_NORMAL);
3316 /* If either SRC or DEST is not a pointer type, don't do this
3317 operation in-line. */
3318 if (dest_align == 0 || src_align == 0)
3319 return NULL_RTX;
3321 /* If LEN is not constant, call the normal function. */
3322 if (! host_integerp (len, 1))
3323 return NULL_RTX;
3325 len_rtx = expand_normal (len);
3326 src_str = c_getstr (src);
3328 /* If SRC is a string constant and block move would be done
3329 by pieces, we can avoid loading the string from memory
3330 and only store the computed constants. */
3331 if (src_str
3332 && GET_CODE (len_rtx) == CONST_INT
3333 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3334 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3335 (void *) src_str, dest_align))
3337 dest_mem = get_memory_rtx (dest, len);
3338 set_mem_align (dest_mem, dest_align);
3339 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3340 builtin_memcpy_read_str,
3341 (void *) src_str, dest_align, endp);
3342 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3343 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3344 return dest_mem;
3347 if (GET_CODE (len_rtx) == CONST_INT
3348 && can_move_by_pieces (INTVAL (len_rtx),
3349 MIN (dest_align, src_align)))
3351 dest_mem = get_memory_rtx (dest, len);
3352 set_mem_align (dest_mem, dest_align);
3353 src_mem = get_memory_rtx (src, len);
3354 set_mem_align (src_mem, src_align);
3355 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3356 MIN (dest_align, src_align), endp);
3357 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3358 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3359 return dest_mem;
3362 return NULL_RTX;
3366 /* Expand expression EXP, which is a call to the memmove builtin. Return
3367 NULL_RTX if we failed; the caller should emit a normal call. */
3369 static rtx
3370 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3372 if (!validate_arglist (exp,
3373 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3374 return NULL_RTX;
3375 else
3377 tree dest = CALL_EXPR_ARG (exp, 0);
3378 tree src = CALL_EXPR_ARG (exp, 1);
3379 tree len = CALL_EXPR_ARG (exp, 2);
3380 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3381 target, mode, ignore);
3385 /* Helper function to do the actual work for expand_builtin_memmove. The
3386 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3387 so that this can also be called without constructing an actual CALL_EXPR.
3388 TYPE is the return type of the call. The other arguments and return value
3389 are the same as for expand_builtin_memmove. */
3391 static rtx
3392 expand_builtin_memmove_args (tree dest, tree src, tree len,
3393 tree type, rtx target, enum machine_mode mode,
3394 int ignore)
3396 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3398 if (result)
3400 while (TREE_CODE (result) == COMPOUND_EXPR)
3402 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3403 EXPAND_NORMAL);
3404 result = TREE_OPERAND (result, 1);
3406 return expand_expr (result, target, mode, EXPAND_NORMAL);
3409 /* Otherwise, call the normal function. */
3410 return NULL_RTX;
3413 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3414 NULL_RTX if we failed the caller should emit a normal call. */
3416 static rtx
3417 expand_builtin_bcopy (tree exp, int ignore)
3419 tree type = TREE_TYPE (exp);
3420 tree src, dest, size;
3422 if (!validate_arglist (exp,
3423 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3424 return NULL_RTX;
3426 src = CALL_EXPR_ARG (exp, 0);
3427 dest = CALL_EXPR_ARG (exp, 1);
3428 size = CALL_EXPR_ARG (exp, 2);
3430 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3431 We do it this way so that if it isn't expanded inline, we fall
3432 back to calling bcopy instead of memmove. */
3433 return expand_builtin_memmove_args (dest, src,
3434 fold_convert (sizetype, size),
3435 type, const0_rtx, VOIDmode,
3436 ignore);
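/* For illustration, the bcopy rewrite above corresponds at the source level
   to turning

       bcopy (msg, buf, n);

   into

       memmove (buf, msg, (size_t) n);

   (a sketch with hypothetical names `msg', `buf' and `n'): the first two
   arguments swap places and the length is converted to sizetype.  Only the
   expansion is shared; if it cannot be done inline, the call that is
   finally emitted is the original bcopy, not memmove.  */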
3439 #ifndef HAVE_movstr
3440 # define HAVE_movstr 0
3441 # define CODE_FOR_movstr CODE_FOR_nothing
3442 #endif
3444 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3445 we failed; the caller should emit a normal call. Otherwise try to
3446 get the result in TARGET, if convenient. If ENDP is 0 return the
3447 destination pointer, if ENDP is 1 return the end pointer ala
3448 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3449 stpcpy. */
3451 static rtx
3452 expand_movstr (tree dest, tree src, rtx target, int endp)
3454 rtx end;
3455 rtx dest_mem;
3456 rtx src_mem;
3457 rtx insn;
3458 const struct insn_data * data;
3460 if (!HAVE_movstr)
3461 return NULL_RTX;
3463 dest_mem = get_memory_rtx (dest, NULL);
3464 src_mem = get_memory_rtx (src, NULL);
3465 if (!endp)
3467 target = force_reg (Pmode, XEXP (dest_mem, 0));
3468 dest_mem = replace_equiv_address (dest_mem, target);
3469 end = gen_reg_rtx (Pmode);
3471 else
3473 if (target == 0 || target == const0_rtx)
3475 end = gen_reg_rtx (Pmode);
3476 if (target == 0)
3477 target = end;
3479 else
3480 end = target;
3483 data = insn_data + CODE_FOR_movstr;
3485 if (data->operand[0].mode != VOIDmode)
3486 end = gen_lowpart (data->operand[0].mode, end);
3488 insn = data->genfun (end, dest_mem, src_mem);
3490 gcc_assert (insn);
3492 emit_insn (insn);
3494 /* movstr is supposed to set end to the address of the NUL
3495 terminator. If the caller requested a mempcpy-like return value,
3496 adjust it. */
3497 if (endp == 1 && target != const0_rtx)
3499 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3500 emit_move_insn (target, force_operand (tem, NULL_RTX));
3503 return target;
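/* A worked example of the ENDP convention used above (hypothetical values:
   DEST points at a large enough buffer and SRC is the string "abc", so the
   copy writes four bytes including the NUL):

       ENDP == 0   return DEST         (strcpy-style)
       ENDP == 2   return DEST + 3     (stpcpy-style: address of the NUL)
       ENDP == 1   return DEST + 4     (mempcpy-style: one past the NUL)

   The movstr pattern itself produces the address of the NUL terminator,
   which is why only the ENDP == 1 case needs the +1 adjustment.  */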
3506 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3507 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3508 try to get the result in TARGET, if convenient (and in mode MODE if that's
3509 convenient). */
3511 static rtx
3512 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3514 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3516 tree dest = CALL_EXPR_ARG (exp, 0);
3517 tree src = CALL_EXPR_ARG (exp, 1);
3518 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3520 return NULL_RTX;
3523 /* Helper function to do the actual work for expand_builtin_strcpy. The
3524 arguments to the builtin_strcpy call DEST and SRC are broken out
3525 so that this can also be called without constructing an actual CALL_EXPR.
3526 The other arguments and return value are the same as for
3527 expand_builtin_strcpy. */
3529 static rtx
3530 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3531 rtx target, enum machine_mode mode)
3533 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3534 if (result)
3535 return expand_expr (result, target, mode, EXPAND_NORMAL);
3536 return expand_movstr (dest, src, target, /*endp=*/0);
3540 /* Expand a call EXP to the stpcpy builtin.
3541 Return NULL_RTX if we failed; the caller should emit a normal call;
3542 otherwise try to get the result in TARGET, if convenient (and in
3543 mode MODE if that's convenient). */
3545 static rtx
3546 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3548 tree dst, src;
3550 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3551 return NULL_RTX;
3553 dst = CALL_EXPR_ARG (exp, 0);
3554 src = CALL_EXPR_ARG (exp, 1);
3556 /* If return value is ignored, transform stpcpy into strcpy. */
3557 if (target == const0_rtx)
3559 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3560 if (!fn)
3561 return NULL_RTX;
3563 return expand_expr (build_call_expr (fn, 2, dst, src),
3564 target, mode, EXPAND_NORMAL);
3566 else
3568 tree len, lenp1;
3569 rtx ret;
3571 /* Ensure we get an actual string whose length can be evaluated at
3572 compile-time, not an expression containing a string. This is
3573 because the latter will potentially produce pessimized code
3574 when used to produce the return value. */
3575 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3576 return expand_movstr (dst, src, target, /*endp=*/2);
3578 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3579 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3580 target, mode, /*endp=*/2);
3582 if (ret)
3583 return ret;
3585 if (TREE_CODE (len) == INTEGER_CST)
3587 rtx len_rtx = expand_normal (len);
3589 if (GET_CODE (len_rtx) == CONST_INT)
3591 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3592 dst, src, target, mode);
3594 if (ret)
3596 if (! target)
3598 if (mode != VOIDmode)
3599 target = gen_reg_rtx (mode);
3600 else
3601 target = gen_reg_rtx (GET_MODE (ret));
3603 if (GET_MODE (target) != GET_MODE (ret))
3604 ret = gen_lowpart (GET_MODE (target), ret);
3606 ret = plus_constant (ret, INTVAL (len_rtx));
3607 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3608 gcc_assert (ret);
3610 return target;
3615 return expand_movstr (dst, src, target, /*endp=*/2);
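/* Source-level sketch of the stpcpy cases handled above (hypothetical names
   `d', `s' and `p'; not used elsewhere in this file):

       (void) stpcpy (d, s);    becomes   strcpy (d, s);
       p = stpcpy (d, "abc");   becomes   p = (char *) mempcpy (d, "abc", 4) - 1;

   i.e. when the result is unused a plain strcpy suffices, and when the
   source length is known at compile time the call is a mempcpy of
   strlen (SRC) + 1 bytes whose mempcpy-style return value is backed up by
   one byte so that it points at the copied NUL.  */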
3619 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3620 bytes from constant string DATA + OFFSET and return it as target
3621 constant. */
3623 static rtx
3624 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3625 enum machine_mode mode)
3627 const char *str = (const char *) data;
3629 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3630 return const0_rtx;
3632 return c_readstr (str + offset, mode);
3635 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3636 NULL_RTX if we failed; the caller should emit a normal call. */
3638 static rtx
3639 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3641 tree fndecl = get_callee_fndecl (exp);
3643 if (validate_arglist (exp,
3644 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3646 tree dest = CALL_EXPR_ARG (exp, 0);
3647 tree src = CALL_EXPR_ARG (exp, 1);
3648 tree len = CALL_EXPR_ARG (exp, 2);
3649 tree slen = c_strlen (src, 1);
3650 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3652 if (result)
3654 while (TREE_CODE (result) == COMPOUND_EXPR)
3656 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3657 EXPAND_NORMAL);
3658 result = TREE_OPERAND (result, 1);
3660 return expand_expr (result, target, mode, EXPAND_NORMAL);
3663 /* We must be passed a constant len and src parameter. */
3664 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3665 return NULL_RTX;
3667 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3669 /* We're required to pad with trailing zeros if the requested
3670 len is greater than strlen(s2)+1. In that case try to
3671 use store_by_pieces; if that fails, punt. */
3672 if (tree_int_cst_lt (slen, len))
3674 unsigned int dest_align
3675 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3676 const char *p = c_getstr (src);
3677 rtx dest_mem;
3679 if (!p || dest_align == 0 || !host_integerp (len, 1)
3680 || !can_store_by_pieces (tree_low_cst (len, 1),
3681 builtin_strncpy_read_str,
3682 (void *) p, dest_align))
3683 return NULL_RTX;
3685 dest_mem = get_memory_rtx (dest, len);
3686 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3687 builtin_strncpy_read_str,
3688 (void *) p, dest_align, 0);
3689 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3690 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3691 return dest_mem;
3694 return NULL_RTX;
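/* A small example of the padding rule handled above (hypothetical buffer
   `buf'):

       char buf[5];
       strncpy (buf, "ab", 5);

   must leave buf as { 'a', 'b', '\0', '\0', '\0' }: the requested length 5
   exceeds strlen ("ab") + 1 == 3, so the tail is zero-filled.  With a
   constant source and length the whole pattern can be emitted with
   store_by_pieces (the read callback above returns zeros past the end of
   the string); otherwise the expansion punts to a library call.  */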
3697 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3698 bytes from constant string DATA + OFFSET and return it as target
3699 constant. */
3702 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3703 enum machine_mode mode)
3705 const char *c = (const char *) data;
3706 char *p = alloca (GET_MODE_SIZE (mode));
3708 memset (p, *c, GET_MODE_SIZE (mode));
3710 return c_readstr (p, mode);
3713 /* Callback routine for store_by_pieces. Return the RTL of a register
3714 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3715 char value given in the RTL register data. For example, if mode is
3716 4 bytes wide, return the RTL for 0x01010101*data. */
3718 static rtx
3719 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3720 enum machine_mode mode)
3722 rtx target, coeff;
3723 size_t size;
3724 char *p;
3726 size = GET_MODE_SIZE (mode);
3727 if (size == 1)
3728 return (rtx) data;
3730 p = alloca (size);
3731 memset (p, 1, size);
3732 coeff = c_readstr (p, mode);
3734 target = convert_to_mode (mode, (rtx) data, 1);
3735 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3736 return force_reg (mode, target);
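/* Worked example of the multiplication trick above, assuming a 4-byte
   integer mode and a fill value of 0x2a:

       0x2a * 0x01010101 == 0x2a2a2a2a

   i.e. multiplying the zero-extended byte by a constant whose every byte is
   1 replicates it across the word, which is exactly the pattern a word-wide
   store needs.  */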
3739 /* Expand expression EXP, which is a call to the memset builtin. Return
3740 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3741 try to get the result in TARGET, if convenient (and in mode MODE if that's
3742 convenient). */
3744 static rtx
3745 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3747 if (!validate_arglist (exp,
3748 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3749 return NULL_RTX;
3750 else
3752 tree dest = CALL_EXPR_ARG (exp, 0);
3753 tree val = CALL_EXPR_ARG (exp, 1);
3754 tree len = CALL_EXPR_ARG (exp, 2);
3755 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3759 /* Helper function to do the actual work for expand_builtin_memset. The
3760 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3761 so that this can also be called without constructing an actual CALL_EXPR.
3762 The other arguments and return value are the same as for
3763 expand_builtin_memset. */
3765 static rtx
3766 expand_builtin_memset_args (tree dest, tree val, tree len,
3767 rtx target, enum machine_mode mode, tree orig_exp)
3769 tree fndecl, fn;
3770 enum built_in_function fcode;
3771 char c;
3772 unsigned int dest_align;
3773 rtx dest_mem, dest_addr, len_rtx;
3774 HOST_WIDE_INT expected_size = -1;
3775 unsigned int expected_align = 0;
3777 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3779 /* If DEST is not a pointer type, don't do this operation in-line. */
3780 if (dest_align == 0)
3781 return NULL_RTX;
3783 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3784 if (expected_align < dest_align)
3785 expected_align = dest_align;
3787 /* If the LEN parameter is zero, return DEST. */
3788 if (integer_zerop (len))
3790 /* Evaluate and ignore VAL in case it has side-effects. */
3791 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3792 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3795 /* Stabilize the arguments in case we fail. */
3796 dest = builtin_save_expr (dest);
3797 val = builtin_save_expr (val);
3798 len = builtin_save_expr (len);
3800 len_rtx = expand_normal (len);
3801 dest_mem = get_memory_rtx (dest, len);
3803 if (TREE_CODE (val) != INTEGER_CST)
3805 rtx val_rtx;
3807 val_rtx = expand_normal (val);
3808 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3809 val_rtx, 0);
3811 /* Assume that we can memset by pieces if we can store
3812 * the coefficients by pieces (in the required modes).
3813 * We can't pass builtin_memset_gen_str as that emits RTL. */
3814 c = 1;
3815 if (host_integerp (len, 1)
3816 && !(optimize_size && tree_low_cst (len, 1) > 1)
3817 && can_store_by_pieces (tree_low_cst (len, 1),
3818 builtin_memset_read_str, &c, dest_align))
3820 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3821 val_rtx);
3822 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3823 builtin_memset_gen_str, val_rtx, dest_align, 0);
3825 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3826 dest_align, expected_align,
3827 expected_size))
3828 goto do_libcall;
3830 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3831 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3832 return dest_mem;
3835 if (target_char_cast (val, &c))
3836 goto do_libcall;
3838 if (c)
3840 if (host_integerp (len, 1)
3841 && !(optimize_size && tree_low_cst (len, 1) > 1)
3842 && can_store_by_pieces (tree_low_cst (len, 1),
3843 builtin_memset_read_str, &c, dest_align))
3844 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3845 builtin_memset_read_str, &c, dest_align, 0);
3846 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3847 dest_align, expected_align,
3848 expected_size))
3849 goto do_libcall;
3851 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3852 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3853 return dest_mem;
3856 set_mem_align (dest_mem, dest_align);
3857 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3858 CALL_EXPR_TAILCALL (orig_exp)
3859 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3860 expected_align, expected_size);
3862 if (dest_addr == 0)
3864 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3865 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3868 return dest_addr;
3870 do_libcall:
3871 fndecl = get_callee_fndecl (orig_exp);
3872 fcode = DECL_FUNCTION_CODE (fndecl);
3873 if (fcode == BUILT_IN_MEMSET)
3874 fn = build_call_expr (fndecl, 3, dest, val, len);
3875 else if (fcode == BUILT_IN_BZERO)
3876 fn = build_call_expr (fndecl, 2, dest, len);
3877 else
3878 gcc_unreachable ();
3879 if (TREE_CODE (fn) == CALL_EXPR)
3880 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3881 return expand_call (fn, target, target == const0_rtx);
3884 /* Expand expression EXP, which is a call to the bzero builtin. Return
3885 NULL_RTX if we failed; the caller should emit a normal call. */
3887 static rtx
3888 expand_builtin_bzero (tree exp)
3890 tree dest, size;
3892 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3893 return NULL_RTX;
3895 dest = CALL_EXPR_ARG (exp, 0);
3896 size = CALL_EXPR_ARG (exp, 1);
3898 /* New argument list transforming bzero(ptr x, int y) to
3899 memset(ptr x, int 0, size_t y). This is done this way
3900 so that if it isn't expanded inline, we fall back to
3901 calling bzero instead of memset. */
3903 return expand_builtin_memset_args (dest, integer_zero_node,
3904 fold_convert (sizetype, size),
3905 const0_rtx, VOIDmode, exp);
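/* For illustration, the rewrite above turns

       bzero (buf, n);

   into the equivalent

       memset (buf, 0, (size_t) n);

   (hypothetical names `buf' and `n').  As with bcopy, only the expansion is
   shared: if the memset expander cannot do the job inline, the call that is
   finally emitted is the original bzero, not memset.  */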
3908 /* Expand expression EXP, which is a call to the memcmp built-in function.
3909 Return NULL_RTX if we failed and the
3910 caller should emit a normal call; otherwise try to get the result in
3911 TARGET, if convenient (and in mode MODE, if that's convenient). */
3913 static rtx
3914 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
3916 if (!validate_arglist (exp,
3917 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3918 return NULL_RTX;
3919 else
3921 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
3922 CALL_EXPR_ARG (exp, 1),
3923 CALL_EXPR_ARG (exp, 2));
3924 if (result)
3925 return expand_expr (result, target, mode, EXPAND_NORMAL);
3928 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3930 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3931 rtx result;
3932 rtx insn;
3933 tree arg1 = CALL_EXPR_ARG (exp, 0);
3934 tree arg2 = CALL_EXPR_ARG (exp, 1);
3935 tree len = CALL_EXPR_ARG (exp, 2);
3937 int arg1_align
3938 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3939 int arg2_align
3940 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3941 enum machine_mode insn_mode;
3943 #ifdef HAVE_cmpmemsi
3944 if (HAVE_cmpmemsi)
3945 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3946 else
3947 #endif
3948 #ifdef HAVE_cmpstrnsi
3949 if (HAVE_cmpstrnsi)
3950 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3951 else
3952 #endif
3953 return NULL_RTX;
3955 /* If we don't have POINTER_TYPE, call the function. */
3956 if (arg1_align == 0 || arg2_align == 0)
3957 return NULL_RTX;
3959 /* Make a place to write the result of the instruction. */
3960 result = target;
3961 if (! (result != 0
3962 && REG_P (result) && GET_MODE (result) == insn_mode
3963 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3964 result = gen_reg_rtx (insn_mode);
3966 arg1_rtx = get_memory_rtx (arg1, len);
3967 arg2_rtx = get_memory_rtx (arg2, len);
3968 arg3_rtx = expand_normal (len);
3970 /* Set MEM_SIZE as appropriate. */
3971 if (GET_CODE (arg3_rtx) == CONST_INT)
3973 set_mem_size (arg1_rtx, arg3_rtx);
3974 set_mem_size (arg2_rtx, arg3_rtx);
3977 #ifdef HAVE_cmpmemsi
3978 if (HAVE_cmpmemsi)
3979 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3980 GEN_INT (MIN (arg1_align, arg2_align)));
3981 else
3982 #endif
3983 #ifdef HAVE_cmpstrnsi
3984 if (HAVE_cmpstrnsi)
3985 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3986 GEN_INT (MIN (arg1_align, arg2_align)));
3987 else
3988 #endif
3989 gcc_unreachable ();
3991 if (insn)
3992 emit_insn (insn);
3993 else
3994 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
3995 TYPE_MODE (integer_type_node), 3,
3996 XEXP (arg1_rtx, 0), Pmode,
3997 XEXP (arg2_rtx, 0), Pmode,
3998 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3999 TYPE_UNSIGNED (sizetype)),
4000 TYPE_MODE (sizetype));
4002 /* Return the value in the proper mode for this function. */
4003 mode = TYPE_MODE (TREE_TYPE (exp));
4004 if (GET_MODE (result) == mode)
4005 return result;
4006 else if (target != 0)
4008 convert_move (target, result, 0);
4009 return target;
4011 else
4012 return convert_to_mode (mode, result, 0);
4014 #endif
4016 return NULL_RTX;
4019 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4020 if we failed; the caller should emit a normal call. Otherwise try to get
4021 the result in TARGET, if convenient. */
4023 static rtx
4024 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4026 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4027 return NULL_RTX;
4028 else
4030 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4031 CALL_EXPR_ARG (exp, 1));
4032 if (result)
4033 return expand_expr (result, target, mode, EXPAND_NORMAL);
4036 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4037 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4038 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4040 rtx arg1_rtx, arg2_rtx;
4041 rtx result, insn = NULL_RTX;
4042 tree fndecl, fn;
4043 tree arg1 = CALL_EXPR_ARG (exp, 0);
4044 tree arg2 = CALL_EXPR_ARG (exp, 1);
4046 int arg1_align
4047 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4048 int arg2_align
4049 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4051 /* If we don't have POINTER_TYPE, call the function. */
4052 if (arg1_align == 0 || arg2_align == 0)
4053 return NULL_RTX;
4055 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4056 arg1 = builtin_save_expr (arg1);
4057 arg2 = builtin_save_expr (arg2);
4059 arg1_rtx = get_memory_rtx (arg1, NULL);
4060 arg2_rtx = get_memory_rtx (arg2, NULL);
4062 #ifdef HAVE_cmpstrsi
4063 /* Try to call cmpstrsi. */
4064 if (HAVE_cmpstrsi)
4066 enum machine_mode insn_mode
4067 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4069 /* Make a place to write the result of the instruction. */
4070 result = target;
4071 if (! (result != 0
4072 && REG_P (result) && GET_MODE (result) == insn_mode
4073 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4074 result = gen_reg_rtx (insn_mode);
4076 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4077 GEN_INT (MIN (arg1_align, arg2_align)));
4079 #endif
4080 #ifdef HAVE_cmpstrnsi
4081 /* Try to determine at least one length and call cmpstrnsi. */
4082 if (!insn && HAVE_cmpstrnsi)
4084 tree len;
4085 rtx arg3_rtx;
4087 enum machine_mode insn_mode
4088 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4089 tree len1 = c_strlen (arg1, 1);
4090 tree len2 = c_strlen (arg2, 1);
4092 if (len1)
4093 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4094 if (len2)
4095 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4097 /* If we don't have a constant length for the first, use the length
4098 of the second, if we know it. We don't require a constant for
4099 this case; some cost analysis could be done if both are available
4100 but neither is constant. For now, assume they're equally cheap,
4101 unless one has side effects. If both strings have constant lengths,
4102 use the smaller. */
4104 if (!len1)
4105 len = len2;
4106 else if (!len2)
4107 len = len1;
4108 else if (TREE_SIDE_EFFECTS (len1))
4109 len = len2;
4110 else if (TREE_SIDE_EFFECTS (len2))
4111 len = len1;
4112 else if (TREE_CODE (len1) != INTEGER_CST)
4113 len = len2;
4114 else if (TREE_CODE (len2) != INTEGER_CST)
4115 len = len1;
4116 else if (tree_int_cst_lt (len1, len2))
4117 len = len1;
4118 else
4119 len = len2;
4121 /* If both arguments have side effects, we cannot optimize. */
4122 if (!len || TREE_SIDE_EFFECTS (len))
4123 goto do_libcall;
4125 arg3_rtx = expand_normal (len);
4127 /* Make a place to write the result of the instruction. */
4128 result = target;
4129 if (! (result != 0
4130 && REG_P (result) && GET_MODE (result) == insn_mode
4131 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4132 result = gen_reg_rtx (insn_mode);
4134 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4135 GEN_INT (MIN (arg1_align, arg2_align)));
4137 #endif
4139 if (insn)
4141 emit_insn (insn);
4143 /* Return the value in the proper mode for this function. */
4144 mode = TYPE_MODE (TREE_TYPE (exp));
4145 if (GET_MODE (result) == mode)
4146 return result;
4147 if (target == 0)
4148 return convert_to_mode (mode, result, 0);
4149 convert_move (target, result, 0);
4150 return target;
4153 /* Expand the library call ourselves using a stabilized argument
4154 list to avoid re-evaluating the function's arguments twice. */
4155 #ifdef HAVE_cmpstrnsi
4156 do_libcall:
4157 #endif
4158 fndecl = get_callee_fndecl (exp);
4159 fn = build_call_expr (fndecl, 2, arg1, arg2);
4160 if (TREE_CODE (fn) == CALL_EXPR)
4161 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4162 return expand_call (fn, target, target == const0_rtx);
4164 #endif
4165 return NULL_RTX;
4168 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4169 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
4170 the result in TARGET, if convenient. */
4172 static rtx
4173 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4175 if (!validate_arglist (exp,
4176 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4177 return NULL_RTX;
4178 else
4180 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4181 CALL_EXPR_ARG (exp, 1),
4182 CALL_EXPR_ARG (exp, 2));
4183 if (result)
4184 return expand_expr (result, target, mode, EXPAND_NORMAL);
4187 /* If c_strlen can determine an expression for one of the string
4188 lengths, and it doesn't have side effects, then emit cmpstrnsi
4189 using length MIN(strlen(string)+1, arg3). */
4190 #ifdef HAVE_cmpstrnsi
4191 if (HAVE_cmpstrnsi)
4193 tree len, len1, len2;
4194 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4195 rtx result, insn;
4196 tree fndecl, fn;
4197 tree arg1 = CALL_EXPR_ARG (exp, 0);
4198 tree arg2 = CALL_EXPR_ARG (exp, 1);
4199 tree arg3 = CALL_EXPR_ARG (exp, 2);
4201 int arg1_align
4202 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4203 int arg2_align
4204 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4205 enum machine_mode insn_mode
4206 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4208 len1 = c_strlen (arg1, 1);
4209 len2 = c_strlen (arg2, 1);
4211 if (len1)
4212 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4213 if (len2)
4214 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4216 /* If we don't have a constant length for the first, use the length
4217 of the second, if we know it. We don't require a constant for
4218 this case; some cost analysis could be done if both are available
4219 but neither is constant. For now, assume they're equally cheap,
4220 unless one has side effects. If both strings have constant lengths,
4221 use the smaller. */
4223 if (!len1)
4224 len = len2;
4225 else if (!len2)
4226 len = len1;
4227 else if (TREE_SIDE_EFFECTS (len1))
4228 len = len2;
4229 else if (TREE_SIDE_EFFECTS (len2))
4230 len = len1;
4231 else if (TREE_CODE (len1) != INTEGER_CST)
4232 len = len2;
4233 else if (TREE_CODE (len2) != INTEGER_CST)
4234 len = len1;
4235 else if (tree_int_cst_lt (len1, len2))
4236 len = len1;
4237 else
4238 len = len2;
4240 /* If both arguments have side effects, we cannot optimize. */
4241 if (!len || TREE_SIDE_EFFECTS (len))
4242 return NULL_RTX;
4244 /* The actual new length parameter is MIN(len,arg3). */
4245 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4246 fold_convert (TREE_TYPE (len), arg3));
4248 /* If we don't have POINTER_TYPE, call the function. */
4249 if (arg1_align == 0 || arg2_align == 0)
4250 return NULL_RTX;
4252 /* Make a place to write the result of the instruction. */
4253 result = target;
4254 if (! (result != 0
4255 && REG_P (result) && GET_MODE (result) == insn_mode
4256 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4257 result = gen_reg_rtx (insn_mode);
4259 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4260 arg1 = builtin_save_expr (arg1);
4261 arg2 = builtin_save_expr (arg2);
4262 len = builtin_save_expr (len);
4264 arg1_rtx = get_memory_rtx (arg1, len);
4265 arg2_rtx = get_memory_rtx (arg2, len);
4266 arg3_rtx = expand_normal (len);
4267 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4268 GEN_INT (MIN (arg1_align, arg2_align)));
4269 if (insn)
4271 emit_insn (insn);
4273 /* Return the value in the proper mode for this function. */
4274 mode = TYPE_MODE (TREE_TYPE (exp));
4275 if (GET_MODE (result) == mode)
4276 return result;
4277 if (target == 0)
4278 return convert_to_mode (mode, result, 0);
4279 convert_move (target, result, 0);
4280 return target;
4283 /* Expand the library call ourselves using a stabilized argument
4284 list to avoid re-evaluating the function's arguments twice. */
4285 fndecl = get_callee_fndecl (exp);
4286 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4287 if (TREE_CODE (fn) == CALL_EXPR)
4288 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4289 return expand_call (fn, target, target == const0_rtx);
4291 #endif
4292 return NULL_RTX;
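/* A short example of the length computation above (hypothetical variables
   `s' and `n'):

       strncmp (s, "abc", n)

   has len2 = strlen ("abc") + 1 = 4 for the constant operand, so the
   cmpstrnsi pattern is emitted with length MIN (4, n): comparing past the
   first NUL of "abc" could never change the result, so the known bound is
   safe to apply even when n is larger.  */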
4295 /* Expand expression EXP, which is a call to the strcat builtin.
4296 Return NULL_RTX if we failed; the caller should emit a normal call;
4297 otherwise try to get the result in TARGET, if convenient. */
4299 static rtx
4300 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4302 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4303 return NULL_RTX;
4304 else
4306 tree dst = CALL_EXPR_ARG (exp, 0);
4307 tree src = CALL_EXPR_ARG (exp, 1);
4308 const char *p = c_getstr (src);
4310 /* If the string length is zero, return the dst parameter. */
4311 if (p && *p == '\0')
4312 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4314 if (!optimize_size)
4316 /* See if we can store by pieces into (dst + strlen(dst)). */
4317 tree newsrc, newdst,
4318 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4319 rtx insns;
4321 /* Stabilize the argument list. */
4322 newsrc = builtin_save_expr (src);
4323 dst = builtin_save_expr (dst);
4325 start_sequence ();
4327 /* Create strlen (dst). */
4328 newdst = build_call_expr (strlen_fn, 1, dst);
4329 /* Create (dst + (cast) strlen (dst)). */
4330 newdst = fold_convert (TREE_TYPE (dst), newdst);
4331 newdst = fold_build2 (PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4333 newdst = builtin_save_expr (newdst);
4335 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4337 end_sequence (); /* Stop sequence. */
4338 return NULL_RTX;
4341 /* Output the entire sequence. */
4342 insns = get_insns ();
4343 end_sequence ();
4344 emit_insn (insns);
4346 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4349 return NULL_RTX;
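/* Source-level sketch of the strcat expansion above (hypothetical names
   `d' and `s'):

       strcat (d, s);

   is handled as

       strcpy (d + strlen (d), s);

   with d and s stabilized first so neither is evaluated twice, and with the
   whole generated sequence thrown away (falling back to a normal strcat
   call) if the inner strcpy cannot itself be expanded inline.  */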
4353 /* Expand expression EXP, which is a call to the strncat builtin.
4354 Return NULL_RTX if we failed; the caller should emit a normal call;
4355 otherwise try to get the result in TARGET, if convenient. */
4357 static rtx
4358 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4360 if (validate_arglist (exp,
4361 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4363 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4364 CALL_EXPR_ARG (exp, 1),
4365 CALL_EXPR_ARG (exp, 2));
4366 if (result)
4367 return expand_expr (result, target, mode, EXPAND_NORMAL);
4369 return NULL_RTX;
4372 /* Expand expression EXP, which is a call to the strspn builtin.
4373 Return NULL_RTX if we failed; the caller should emit a normal call;
4374 otherwise try to get the result in TARGET, if convenient. */
4376 static rtx
4377 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4379 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4381 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4382 CALL_EXPR_ARG (exp, 1));
4383 if (result)
4384 return expand_expr (result, target, mode, EXPAND_NORMAL);
4386 return NULL_RTX;
4389 /* Expand expression EXP, which is a call to the strcspn builtin.
4390 Return NULL_RTX if we failed; the caller should emit a normal call;
4391 otherwise try to get the result in TARGET, if convenient. */
4393 static rtx
4394 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4396 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4398 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4399 CALL_EXPR_ARG (exp, 1));
4400 if (result)
4401 return expand_expr (result, target, mode, EXPAND_NORMAL);
4403 return NULL_RTX;
4406 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4407 if that's convenient. */
4410 expand_builtin_saveregs (void)
4412 rtx val, seq;
4414 /* Don't do __builtin_saveregs more than once in a function.
4415 Save the result of the first call and reuse it. */
4416 if (saveregs_value != 0)
4417 return saveregs_value;
4419 /* When this function is called, it means that registers must be
4420 saved on entry to this function. So we migrate the call to the
4421 first insn of this function. */
4423 start_sequence ();
4425 /* Do whatever the machine needs done in this case. */
4426 val = targetm.calls.expand_builtin_saveregs ();
4428 seq = get_insns ();
4429 end_sequence ();
4431 saveregs_value = val;
4433 /* Put the insns after the NOTE that starts the function. If this
4434 is inside a start_sequence, make the outer-level insn chain current, so
4435 the code is placed at the start of the function. */
4436 push_topmost_sequence ();
4437 emit_insn_after (seq, entry_of_function ());
4438 pop_topmost_sequence ();
4440 return val;
4443 /* __builtin_args_info (N) returns word N of the arg space info
4444 for the current function. The number and meanings of words
4445 are controlled by the definition of CUMULATIVE_ARGS. */
4447 static rtx
4448 expand_builtin_args_info (tree exp)
4450 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4451 int *word_ptr = (int *) &current_function_args_info;
4453 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4455 if (call_expr_nargs (exp) != 0)
4457 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4458 error ("argument of %<__builtin_args_info%> must be constant");
4459 else
4461 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4463 if (wordnum < 0 || wordnum >= nwords)
4464 error ("argument of %<__builtin_args_info%> out of range");
4465 else
4466 return GEN_INT (word_ptr[wordnum]);
4469 else
4470 error ("missing argument in %<__builtin_args_info%>");
4472 return const0_rtx;
4475 /* Expand a call to __builtin_next_arg. */
4477 static rtx
4478 expand_builtin_next_arg (void)
4480 /* Checking arguments is already done in fold_builtin_next_arg
4481 that must be called before this function. */
4482 return expand_binop (Pmode, add_optab,
4483 current_function_internal_arg_pointer,
4484 current_function_arg_offset_rtx,
4485 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4488 /* Make it easier for the backends by protecting the valist argument
4489 from multiple evaluations. */
4491 static tree
4492 stabilize_va_list (tree valist, int needs_lvalue)
4494 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4496 if (TREE_SIDE_EFFECTS (valist))
4497 valist = save_expr (valist);
4499 /* For this case, the backends will be expecting a pointer to
4500 TREE_TYPE (va_list_type_node), but it's possible we've
4501 actually been given an array (an actual va_list_type_node).
4502 So fix it. */
4503 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4505 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4506 valist = build_fold_addr_expr_with_type (valist, p1);
4509 else
4511 tree pt;
4513 if (! needs_lvalue)
4515 if (! TREE_SIDE_EFFECTS (valist))
4516 return valist;
4518 pt = build_pointer_type (va_list_type_node);
4519 valist = fold_build1 (ADDR_EXPR, pt, valist);
4520 TREE_SIDE_EFFECTS (valist) = 1;
4523 if (TREE_SIDE_EFFECTS (valist))
4524 valist = save_expr (valist);
4525 valist = build_fold_indirect_ref (valist);
4528 return valist;
4531 /* The "standard" definition of va_list is void*. */
4533 tree
4534 std_build_builtin_va_list (void)
4536 return ptr_type_node;
4539 /* The "standard" implementation of va_start: just assign `nextarg' to
4540 the variable. */
4542 void
4543 std_expand_builtin_va_start (tree valist, rtx nextarg)
4545 tree t;
4547 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
4548 make_tree (ptr_type_node, nextarg));
4549 TREE_SIDE_EFFECTS (t) = 1;
4551 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4554 /* Expand EXP, a call to __builtin_va_start. */
4556 static rtx
4557 expand_builtin_va_start (tree exp)
4559 rtx nextarg;
4560 tree valist;
4562 if (call_expr_nargs (exp) < 2)
4564 error ("too few arguments to function %<va_start%>");
4565 return const0_rtx;
4568 if (fold_builtin_next_arg (exp, true))
4569 return const0_rtx;
4571 nextarg = expand_builtin_next_arg ();
4572 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4574 #ifdef EXPAND_BUILTIN_VA_START
4575 EXPAND_BUILTIN_VA_START (valist, nextarg);
4576 #else
4577 std_expand_builtin_va_start (valist, nextarg);
4578 #endif
4580 return const0_rtx;
4583 /* The "standard" implementation of va_arg: read the value from the
4584 current (padded) address and increment by the (padded) size. */
4586 tree
4587 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4589 tree addr, t, type_size, rounded_size, valist_tmp;
4590 unsigned HOST_WIDE_INT align, boundary;
4591 bool indirect;
4593 #ifdef ARGS_GROW_DOWNWARD
4594 /* All of the alignment and movement below is for args-grow-up machines.
4595 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4596 implement their own specialized gimplify_va_arg_expr routines. */
4597 gcc_unreachable ();
4598 #endif
4600 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4601 if (indirect)
4602 type = build_pointer_type (type);
4604 align = PARM_BOUNDARY / BITS_PER_UNIT;
4605 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4607 /* Hoist the valist value into a temporary for the moment. */
4608 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4610 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4611 requires greater alignment, we must perform dynamic alignment. */
4612 if (boundary > align
4613 && !integer_zerop (TYPE_SIZE (type)))
4615 t = fold_convert (TREE_TYPE (valist), size_int (boundary - 1));
4616 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4617 build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t));
4618 gimplify_and_add (t, pre_p);
4620 t = fold_convert (TREE_TYPE (valist), size_int (-boundary));
4621 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4622 build2 (BIT_AND_EXPR, TREE_TYPE (valist), valist_tmp, t));
4623 gimplify_and_add (t, pre_p);
4625 else
4626 boundary = align;
4628 /* If the actual alignment is less than the alignment of the type,
4629 adjust the type accordingly so that we don't assume strict alignment
4630 when dereferencing the pointer. */
4631 boundary *= BITS_PER_UNIT;
4632 if (boundary < TYPE_ALIGN (type))
4634 type = build_variant_type_copy (type);
4635 TYPE_ALIGN (type) = boundary;
4638 /* Compute the rounded size of the type. */
4639 type_size = size_in_bytes (type);
4640 rounded_size = round_up (type_size, align);
4642 /* Reduce rounded_size so it's sharable with the postqueue. */
4643 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4645 /* Get AP. */
4646 addr = valist_tmp;
4647 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4649 /* Small args are padded downward. */
4650 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4651 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4652 size_binop (MINUS_EXPR, rounded_size, type_size));
4653 t = fold_convert (TREE_TYPE (addr), t);
4654 addr = fold_build2 (PLUS_EXPR, TREE_TYPE (addr), addr, t);
4657 /* Compute new value for AP. */
4658 t = fold_convert (TREE_TYPE (valist), rounded_size);
4659 t = build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t);
4660 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4661 gimplify_and_add (t, pre_p);
4663 addr = fold_convert (build_pointer_type (type), addr);
4665 if (indirect)
4666 addr = build_va_arg_indirect_ref (addr);
4668 return build_va_arg_indirect_ref (addr);
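/* Worked example of the pointer arithmetic above, assuming a 4-byte
   PARM_BOUNDARY slot, an 8-byte double whose FUNCTION_ARG_BOUNDARY is 8,
   and an incoming ap of 0x1004 (all values hypothetical):

       ap  = (ap + 8 - 1) & -8;      ap becomes 0x1008 (dynamic alignment)
       val = *(double *) ap;         the argument is read at 0x1008
       ap += ROUND_UP (8, 4);        ap becomes 0x1010 for the next argument

   When PAD_VARARGS_DOWN holds and the argument fits in a single slot
   (rounded size <= the slot alignment), the read is additionally offset by
   rounded_size - type_size so small values are taken from the padded-down
   end of their slot.  */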
4671 /* Build an indirect-ref expression over the given TREE, which represents a
4672 piece of a va_arg() expansion. */
4673 tree
4674 build_va_arg_indirect_ref (tree addr)
4676 addr = build_fold_indirect_ref (addr);
4678 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4679 mf_mark (addr);
4681 return addr;
4684 /* Return a dummy expression of type TYPE in order to keep going after an
4685 error. */
4687 static tree
4688 dummy_object (tree type)
4690 tree t = build_int_cst (build_pointer_type (type), 0);
4691 return build1 (INDIRECT_REF, type, t);
4694 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4695 builtin function, but a very special sort of operator. */
4697 enum gimplify_status
4698 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4700 tree promoted_type, want_va_type, have_va_type;
4701 tree valist = TREE_OPERAND (*expr_p, 0);
4702 tree type = TREE_TYPE (*expr_p);
4703 tree t;
4705 /* Verify that valist is of the proper type. */
4706 want_va_type = va_list_type_node;
4707 have_va_type = TREE_TYPE (valist);
4709 if (have_va_type == error_mark_node)
4710 return GS_ERROR;
4712 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4714 /* If va_list is an array type, the argument may have decayed
4715 to a pointer type, e.g. by being passed to another function.
4716 In that case, unwrap both types so that we can compare the
4717 underlying records. */
4718 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4719 || POINTER_TYPE_P (have_va_type))
4721 want_va_type = TREE_TYPE (want_va_type);
4722 have_va_type = TREE_TYPE (have_va_type);
4726 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4728 error ("first argument to %<va_arg%> not of type %<va_list%>");
4729 return GS_ERROR;
4732 /* Generate a diagnostic for requesting data of a type that cannot
4733 be passed through `...' due to type promotion at the call site. */
4734 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4735 != type)
4737 static bool gave_help;
4739 /* Unfortunately, this is merely undefined, rather than a constraint
4740 violation, so we cannot make this an error. If this call is never
4741 executed, the program is still strictly conforming. */
4742 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4743 type, promoted_type);
4744 if (! gave_help)
4746 gave_help = true;
4747 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4748 promoted_type, type);
4751 /* We can, however, treat "undefined" any way we please.
4752 Call abort to encourage the user to fix the program. */
4753 inform ("if this code is reached, the program will abort");
4754 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4755 append_to_statement_list (t, pre_p);
4757 /* This is dead code, but go ahead and finish so that the
4758 mode of the result comes out right. */
4759 *expr_p = dummy_object (type);
4760 return GS_ALL_DONE;
4762 else
4764 /* Make it easier for the backends by protecting the valist argument
4765 from multiple evaluations. */
4766 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4768 /* For this case, the backends will be expecting a pointer to
4769 TREE_TYPE (va_list_type_node), but it's possible we've
4770 actually been given an array (an actual va_list_type_node).
4771 So fix it. */
4772 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4774 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4775 valist = build_fold_addr_expr_with_type (valist, p1);
4777 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4779 else
4780 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4782 if (!targetm.gimplify_va_arg_expr)
4783 /* FIXME: Once most targets are converted we should merely
4784 assert this is non-null. */
4785 return GS_ALL_DONE;
4787 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4788 return GS_OK;
4792 /* Expand EXP, a call to __builtin_va_end. */
4794 static rtx
4795 expand_builtin_va_end (tree exp)
4797 tree valist = CALL_EXPR_ARG (exp, 0);
4799 /* Evaluate for side effects, if needed. I hate macros that don't
4800 do that. */
4801 if (TREE_SIDE_EFFECTS (valist))
4802 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4804 return const0_rtx;
4807 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4808 builtin rather than just as an assignment in stdarg.h because of the
4809 nastiness of array-type va_list types. */
4811 static rtx
4812 expand_builtin_va_copy (tree exp)
4814 tree dst, src, t;
4816 dst = CALL_EXPR_ARG (exp, 0);
4817 src = CALL_EXPR_ARG (exp, 1);
4819 dst = stabilize_va_list (dst, 1);
4820 src = stabilize_va_list (src, 0);
4822 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4824 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4825 TREE_SIDE_EFFECTS (t) = 1;
4826 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4828 else
4830 rtx dstb, srcb, size;
4832 /* Evaluate to pointers. */
4833 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4834 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4835 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4836 VOIDmode, EXPAND_NORMAL);
4838 dstb = convert_memory_address (Pmode, dstb);
4839 srcb = convert_memory_address (Pmode, srcb);
4841 /* "Dereference" to BLKmode memories. */
4842 dstb = gen_rtx_MEM (BLKmode, dstb);
4843 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4844 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4845 srcb = gen_rtx_MEM (BLKmode, srcb);
4846 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4847 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4849 /* Copy. */
4850 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4853 return const0_rtx;
4856 /* Expand a call to one of the builtin functions __builtin_frame_address or
4857 __builtin_return_address. */
4859 static rtx
4860 expand_builtin_frame_address (tree fndecl, tree exp)
4862 /* The argument must be a nonnegative integer constant.
4863 It counts the number of frames to scan up the stack.
4864 The value is the return address saved in that frame. */
4865 if (call_expr_nargs (exp) == 0)
4866 /* Warning about missing arg was already issued. */
4867 return const0_rtx;
4868 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4870 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4871 error ("invalid argument to %<__builtin_frame_address%>");
4872 else
4873 error ("invalid argument to %<__builtin_return_address%>");
4874 return const0_rtx;
4876 else
4878 rtx tem
4879 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4880 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4882 /* Some ports cannot access arbitrary stack frames. */
4883 if (tem == NULL)
4885 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4886 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4887 else
4888 warning (0, "unsupported argument to %<__builtin_return_address%>");
4889 return const0_rtx;
4892 /* For __builtin_frame_address, return what we've got. */
4893 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4894 return tem;
4896 if (!REG_P (tem)
4897 && ! CONSTANT_P (tem))
4898 tem = copy_to_mode_reg (Pmode, tem);
4899 return tem;
4903 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4904 we failed and the caller should emit a normal call; otherwise try to get
4905 the result in TARGET, if convenient. */
4907 static rtx
4908 expand_builtin_alloca (tree exp, rtx target)
4910 rtx op0;
4911 rtx result;
4913 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
4914 should always expand to function calls. These can be intercepted
4915 in libmudflap. */
4916 if (flag_mudflap)
4917 return NULL_RTX;
4919 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4920 return NULL_RTX;
4922 /* Compute the argument. */
4923 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4925 /* Allocate the desired space. */
4926 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4927 result = convert_memory_address (ptr_mode, result);
4929 return result;
4932 /* Expand EXP, a call to a bswap builtin.  The expansion mode is taken from
4933 the type of the argument; if convenient, the result should be placed in TARGET. */
4935 static rtx
4936 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4938 enum machine_mode mode;
4939 tree arg;
4940 rtx op0;
4942 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4943 return NULL_RTX;
4945 arg = CALL_EXPR_ARG (exp, 0);
4946 mode = TYPE_MODE (TREE_TYPE (arg));
4947 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
4949 target = expand_unop (mode, bswap_optab, op0, target, 1);
4951 gcc_assert (target);
4953 return convert_to_mode (mode, target, 0);
4956 /* Expand a call to a unary builtin in EXP.
4957 Return NULL_RTX if a normal call should be emitted rather than expanding the
4958 function in-line. If convenient, the result should be placed in TARGET.
4959 SUBTARGET may be used as the target for computing one of EXP's operands. */
4961 static rtx
4962 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4963 rtx subtarget, optab op_optab)
4965 rtx op0;
4967 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4968 return NULL_RTX;
4970 /* Compute the argument. */
4971 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget, VOIDmode, 0);
4972 /* Compute op, into TARGET if possible.
4973 Set TARGET to wherever the result comes back. */
4974 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4975 op_optab, op0, target, 1);
4976 gcc_assert (target);
4978 return convert_to_mode (target_mode, target, 0);
4981 /* If the string passed to fputs is a constant and is one character
4982 long, we attempt to transform this call into __builtin_fputc(). */
4984 static rtx
4985 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
4987 /* Verify the arguments in the original call. */
4988 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4990 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
4991 CALL_EXPR_ARG (exp, 1),
4992 (target == const0_rtx),
4993 unlocked, NULL_TREE);
4994 if (result)
4995 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
4997 return NULL_RTX;
5000 /* Expand a call to __builtin_expect. We just return our argument
5001 as the builtin_expect semantics should have already been handled by the
5002 tree branch prediction pass. */
5004 static rtx
5005 expand_builtin_expect (tree exp, rtx target)
5007 tree arg, c;
5009 if (call_expr_nargs (exp) < 2)
5010 return const0_rtx;
5011 arg = CALL_EXPR_ARG (exp, 0);
5012 c = CALL_EXPR_ARG (exp, 1);
5014 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5015 /* When guessing was done, the hints should be already stripped away. */
5016 gcc_assert (!flag_guess_branch_prob);
5017 return target;
5020 void
5021 expand_builtin_trap (void)
5023 #ifdef HAVE_trap
5024 if (HAVE_trap)
5025 emit_insn (gen_trap ());
5026 else
5027 #endif
5028 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5029 emit_barrier ();
5032 /* Expand EXP, a call to fabs, fabsf or fabsl.
5033 Return NULL_RTX if a normal call should be emitted rather than expanding
5034 the function inline. If convenient, the result should be placed
5035 in TARGET. SUBTARGET may be used as the target for computing
5036 the operand. */
5038 static rtx
5039 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5041 enum machine_mode mode;
5042 tree arg;
5043 rtx op0;
5045 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5046 return NULL_RTX;
5048 arg = CALL_EXPR_ARG (exp, 0);
5049 mode = TYPE_MODE (TREE_TYPE (arg));
5050 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
5051 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5054 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5055 Return NULL_RTX if a normal call should be emitted rather than expanding the
5056 function inline. If convenient, the result should be placed in TARGET.
5057 SUBTARGET may be used as the target for computing the operand. */
5059 static rtx
5060 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5062 rtx op0, op1;
5063 tree arg;
5065 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5066 return NULL_RTX;
5068 arg = CALL_EXPR_ARG (exp, 0);
5069 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5071 arg = CALL_EXPR_ARG (exp, 1);
5072 op1 = expand_normal (arg);
5074 return expand_copysign (op0, op1, target);
5077 /* Create a new constant string literal and return a char* pointer to it.
5078 The STRING_CST value is the LEN characters at STR. */
5079 tree
5080 build_string_literal (int len, const char *str)
5082 tree t, elem, index, type;
5084 t = build_string (len, str);
5085 elem = build_type_variant (char_type_node, 1, 0);
5086 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5087 type = build_array_type (elem, index);
5088 TREE_TYPE (t) = type;
5089 TREE_CONSTANT (t) = 1;
5090 TREE_INVARIANT (t) = 1;
5091 TREE_READONLY (t) = 1;
5092 TREE_STATIC (t) = 1;
5094 type = build_pointer_type (type);
5095 t = build1 (ADDR_EXPR, type, t);
5097 type = build_pointer_type (elem);
5098 t = build1 (NOP_EXPR, type, t);
5099 return t;
5102 /* Expand EXP, a call to printf or printf_unlocked.
5103 Return NULL_RTX if a normal call should be emitted rather than transforming
5104 the function inline. If convenient, the result should be placed in
5105 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5106 call. */
5107 static rtx
5108 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5109 bool unlocked)
5111 /* If we're using an unlocked function, assume the other unlocked
5112 functions exist explicitly. */
5113 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5114 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5115 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5116 : implicit_built_in_decls[BUILT_IN_PUTS];
5117 const char *fmt_str;
5118 tree fn = 0;
5119 tree fmt, arg;
5120 int nargs = call_expr_nargs (exp);
5122 /* If the return value is used, don't do the transformation. */
5123 if (target != const0_rtx)
5124 return NULL_RTX;
5126 /* Verify the required arguments in the original call. */
5127 if (nargs == 0)
5128 return NULL_RTX;
5129 fmt = CALL_EXPR_ARG (exp, 0);
5130 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5131 return NULL_RTX;
5133 /* Check whether the format is a literal string constant. */
5134 fmt_str = c_getstr (fmt);
5135 if (fmt_str == NULL)
5136 return NULL_RTX;
5138 if (!init_target_chars ())
5139 return NULL_RTX;
5141 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5142 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5144 if ((nargs != 2)
5145 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5146 return NULL_RTX;
5147 if (fn_puts)
5148 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5150 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5151 else if (strcmp (fmt_str, target_percent_c) == 0)
5153 if ((nargs != 2)
5154 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5155 return NULL_RTX;
5156 if (fn_putchar)
5157 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5159 else
5161 /* We can't handle anything else with % args or %% ... yet. */
5162 if (strchr (fmt_str, target_percent))
5163 return NULL_RTX;
5165 if (nargs > 1)
5166 return NULL_RTX;
5168 /* If the format specifier was "", printf does nothing. */
5169 if (fmt_str[0] == '\0')
5170 return const0_rtx;
5171 /* If the format specifier has length of 1, call putchar. */
5172 if (fmt_str[1] == '\0')
5174 Given printf("c") (where c is any one character),
5175 convert "c"[0] to an int and pass that to the replacement
5176 function. */
5177 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5178 if (fn_putchar)
5179 fn = build_call_expr (fn_putchar, 1, arg);
5181 else
5183 /* If the format specifier was "string\n", call puts("string"). */
5184 size_t len = strlen (fmt_str);
5185 if ((unsigned char)fmt_str[len - 1] == target_newline)
5187 /* Create a NUL-terminated string that's one char shorter
5188 than the original, stripping off the trailing '\n'. */
5189 char *newstr = alloca (len);
5190 memcpy (newstr, fmt_str, len - 1);
5191 newstr[len - 1] = 0;
5192 arg = build_string_literal (len, newstr);
5193 if (fn_puts)
5194 fn = build_call_expr (fn_puts, 1, arg);
5196 else
5197 /* We'd like to arrange to call fputs(string,stdout) here,
5198 but we need stdout and don't have a way to get it yet. */
5199 return NULL_RTX;
5203 if (!fn)
5204 return NULL_RTX;
5205 if (TREE_CODE (fn) == CALL_EXPR)
5206 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5207 return expand_expr (fn, target, mode, EXPAND_NORMAL);
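/* Examples of the printf rewrites above (all arguments hypothetical, and
   only applied when the return value of printf is unused):

       printf ("%s\n", s);    becomes   puts (s);
       printf ("%c", c);      becomes   putchar (c);
       printf ("x");          becomes   putchar ('x');
       printf ("hello\n");    becomes   puts ("hello");
       printf ("");           expands to nothing at all.

   Any other format containing '%' is left for the real printf, and so is a
   multi-character format with no trailing newline, since that would need
   fputs and a handle on stdout.  */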
5210 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5211 Return NULL_RTX if a normal call should be emitted rather than transforming
5212 the function inline. If convenient, the result should be placed in
5213 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5214 call. */
5215 static rtx
5216 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5217 bool unlocked)
5219 /* If we're using an unlocked function, assume the other unlocked
5220 functions exist explicitly. */
5221 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5222 : implicit_built_in_decls[BUILT_IN_FPUTC];
5223 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5224 : implicit_built_in_decls[BUILT_IN_FPUTS];
5225 const char *fmt_str;
5226 tree fn = 0;
5227 tree fmt, fp, arg;
5228 int nargs = call_expr_nargs (exp);
5230 /* If the return value is used, don't do the transformation. */
5231 if (target != const0_rtx)
5232 return NULL_RTX;
5234 /* Verify the required arguments in the original call. */
5235 if (nargs < 2)
5236 return NULL_RTX;
5237 fp = CALL_EXPR_ARG (exp, 0);
5238 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5239 return NULL_RTX;
5240 fmt = CALL_EXPR_ARG (exp, 1);
5241 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5242 return NULL_RTX;
5244 /* Check whether the format is a literal string constant. */
5245 fmt_str = c_getstr (fmt);
5246 if (fmt_str == NULL)
5247 return NULL_RTX;
5249 if (!init_target_chars ())
5250 return NULL_RTX;
5252 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5253 if (strcmp (fmt_str, target_percent_s) == 0)
5255 if ((nargs != 3)
5256 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5257 return NULL_RTX;
5258 arg = CALL_EXPR_ARG (exp, 2);
5259 if (fn_fputs)
5260 fn = build_call_expr (fn_fputs, 2, arg, fp);
5262 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5263 else if (strcmp (fmt_str, target_percent_c) == 0)
5265 if ((nargs != 3)
5266 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5267 return NULL_RTX;
5268 arg = CALL_EXPR_ARG (exp, 2);
5269 if (fn_fputc)
5270 fn = build_call_expr (fn_fputc, 2, arg, fp);
5272 else
5274 /* We can't handle anything else with % args or %% ... yet. */
5275 if (strchr (fmt_str, target_percent))
5276 return NULL_RTX;
5278 if (nargs > 2)
5279 return NULL_RTX;
5281 /* If the format specifier was "", fprintf does nothing. */
5282 if (fmt_str[0] == '\0')
5284 /* Evaluate and ignore FILE* argument for side-effects. */
5285 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5286 return const0_rtx;
5289 /* When "string" doesn't contain %, replace all cases of
5290 fprintf(stream,string) with fputs(string,stream). The fputs
5291 builtin will take care of special cases like length == 1. */
5292 if (fn_fputs)
5293 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5296 if (!fn)
5297 return NULL_RTX;
5298 if (TREE_CODE (fn) == CALL_EXPR)
5299 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5300 return expand_expr (fn, target, mode, EXPAND_NORMAL);
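/* Illustrative sketch, not part of the GCC source: when the result is
   unused and the format is a string literal, the rewrites above amount
   to the following source-level equivalences (with the unlocked
   variants substituted for fprintf_unlocked):

     fprintf (fp, "%s", s)   ->  fputs (s, fp)
     fprintf (fp, "%c", c)   ->  fputc (c, fp)
     fprintf (fp, "")        ->  no call at all (fp is still evaluated)
     fprintf (fp, "text")    ->  fputs ("text", fp)  */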
5303 /* Expand a call EXP to sprintf. Return NULL_RTX if
5304 a normal call should be emitted rather than expanding the function
5305 inline. If convenient, the result should be placed in TARGET with
5306 mode MODE. */
5308 static rtx
5309 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5311 tree dest, fmt;
5312 const char *fmt_str;
5313 int nargs = call_expr_nargs (exp);
5315 /* Verify the required arguments in the original call. */
5316 if (nargs < 2)
5317 return NULL_RTX;
5318 dest = CALL_EXPR_ARG (exp, 0);
5319 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5320 return NULL_RTX;
5321 fmt = CALL_EXPR_ARG (exp, 1);
5322 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5323 return NULL_RTX;
5325 /* Check whether the format is a literal string constant. */
5326 fmt_str = c_getstr (fmt);
5327 if (fmt_str == NULL)
5328 return NULL_RTX;
5330 if (!init_target_chars ())
5331 return NULL_RTX;
5333 /* If the format doesn't contain % args or %%, use strcpy. */
5334 if (strchr (fmt_str, target_percent) == 0)
5336 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5337 tree exp;
5339 if ((nargs > 2) || ! fn)
5340 return NULL_RTX;
5341 expand_expr (build_call_expr (fn, 2, dest, fmt),
5342 const0_rtx, VOIDmode, EXPAND_NORMAL);
5343 if (target == const0_rtx)
5344 return const0_rtx;
5345 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5346 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5348 /* If the format is "%s", use strcpy if the result isn't used. */
5349 else if (strcmp (fmt_str, target_percent_s) == 0)
5351 tree fn, arg, len;
5352 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5354 if (! fn)
5355 return NULL_RTX;
5356 if (nargs != 3)
5357 return NULL_RTX;
5358 arg = CALL_EXPR_ARG (exp, 2);
5359 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5360 return NULL_RTX;
5362 if (target != const0_rtx)
5364 len = c_strlen (arg, 1);
5365 if (! len || TREE_CODE (len) != INTEGER_CST)
5366 return NULL_RTX;
5368 else
5369 len = NULL_TREE;
5371 expand_expr (build_call_expr (fn, 2, dest, arg),
5372 const0_rtx, VOIDmode, EXPAND_NORMAL);
5374 if (target == const0_rtx)
5375 return const0_rtx;
5376 return expand_expr (len, target, mode, EXPAND_NORMAL);
5379 return NULL_RTX;
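/* Illustrative sketch, not part of the GCC source: the sprintf rewrites
   above at the C source level.  Prototypes are declared locally so the
   example is self-contained; normally they come from <stdio.h> and
   <string.h>.  */
extern int sprintf (char *, const char *, ...);
extern char *strcpy (char *, const char *);

static int
sprintf_lowering_example (char *buf, const char *s)
{
  /* No '%' in the format: becomes strcpy, and the return value (if
     used) is the known format length.  */
  int n = sprintf (buf, "hello");        /* strcpy (buf, "hello"); n = 5 */

  /* Format "%s" with the result unused: becomes a plain strcpy.  */
  sprintf (buf, "%s", s);                /* strcpy (buf, s) */

  return n;
}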
5382 /* Expand a call to either the entry or exit function profiler. */
5384 static rtx
5385 expand_builtin_profile_func (bool exitp)
5387 rtx this, which;
5389 this = DECL_RTL (current_function_decl);
5390 gcc_assert (MEM_P (this));
5391 this = XEXP (this, 0);
5393 if (exitp)
5394 which = profile_function_exit_libfunc;
5395 else
5396 which = profile_function_entry_libfunc;
5398 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5399 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5400 0),
5401 Pmode);
5403 return const0_rtx;
5406 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5408 static rtx
5409 round_trampoline_addr (rtx tramp)
5411 rtx temp, addend, mask;
5413 /* If we don't need too much alignment, we'll have been guaranteed
5414 proper alignment by get_trampoline_type. */
5415 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5416 return tramp;
5418 /* Round address up to desired boundary. */
5419 temp = gen_reg_rtx (Pmode);
5420 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5421 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5423 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5424 temp, 0, OPTAB_LIB_WIDEN);
5425 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5426 temp, 0, OPTAB_LIB_WIDEN);
5428 return tramp;
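/* Illustrative sketch, not part of the GCC source: the PLUS/AND pair
   above is the standard round-up-to-a-power-of-two idiom, shown here
   in plain C (ALIGN must be a power of two; the mask used above is
   the same value written as a negated alignment).  */
static inline unsigned long
round_up_example (unsigned long addr, unsigned long align)
{
  return (addr + align - 1) & ~(align - 1);
}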
5431 static rtx
5432 expand_builtin_init_trampoline (tree exp)
5434 tree t_tramp, t_func, t_chain;
5435 rtx r_tramp, r_func, r_chain;
5436 #ifdef TRAMPOLINE_TEMPLATE
5437 rtx blktramp;
5438 #endif
5440 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5441 POINTER_TYPE, VOID_TYPE))
5442 return NULL_RTX;
5444 t_tramp = CALL_EXPR_ARG (exp, 0);
5445 t_func = CALL_EXPR_ARG (exp, 1);
5446 t_chain = CALL_EXPR_ARG (exp, 2);
5448 r_tramp = expand_normal (t_tramp);
5449 r_func = expand_normal (t_func);
5450 r_chain = expand_normal (t_chain);
5452 /* Generate insns to initialize the trampoline. */
5453 r_tramp = round_trampoline_addr (r_tramp);
5454 #ifdef TRAMPOLINE_TEMPLATE
5455 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5456 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5457 emit_block_move (blktramp, assemble_trampoline_template (),
5458 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5459 #endif
5460 trampolines_created = 1;
5461 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5463 return const0_rtx;
5466 static rtx
5467 expand_builtin_adjust_trampoline (tree exp)
5469 rtx tramp;
5471 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5472 return NULL_RTX;
5474 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5475 tramp = round_trampoline_addr (tramp);
5476 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5477 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5478 #endif
5480 return tramp;
5483 /* Expand a call to the built-in signbit, signbitf or signbitl function.
5484 Return NULL_RTX if a normal call should be emitted rather than expanding
5485 the function in-line. EXP is the expression that is a call to the builtin
5486 function; if convenient, the result should be placed in TARGET. */
5488 static rtx
5489 expand_builtin_signbit (tree exp, rtx target)
5491 const struct real_format *fmt;
5492 enum machine_mode fmode, imode, rmode;
5493 HOST_WIDE_INT hi, lo;
5494 tree arg;
5495 int word, bitpos;
5496 rtx temp;
5498 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5499 return NULL_RTX;
5501 arg = CALL_EXPR_ARG (exp, 0);
5502 fmode = TYPE_MODE (TREE_TYPE (arg));
5503 rmode = TYPE_MODE (TREE_TYPE (exp));
5504 fmt = REAL_MODE_FORMAT (fmode);
5506 /* For floating point formats without a sign bit, implement signbit
5507 as "ARG < 0.0". */
5508 bitpos = fmt->signbit_ro;
5509 if (bitpos < 0)
5511 /* But we can't do this if the format supports signed zero. */
5512 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5513 return NULL_RTX;
5515 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5516 build_real (TREE_TYPE (arg), dconst0));
5517 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5520 temp = expand_normal (arg);
5521 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5523 imode = int_mode_for_mode (fmode);
5524 if (imode == BLKmode)
5525 return NULL_RTX;
5526 temp = gen_lowpart (imode, temp);
5528 else
5530 imode = word_mode;
5531 /* Handle targets with different FP word orders. */
5532 if (FLOAT_WORDS_BIG_ENDIAN)
5533 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5534 else
5535 word = bitpos / BITS_PER_WORD;
5536 temp = operand_subword_force (temp, word, fmode);
5537 bitpos = bitpos % BITS_PER_WORD;
5540 /* Force the intermediate word_mode (or narrower) result into a
5541 register. This avoids attempting to create paradoxical SUBREGs
5542 of floating point modes below. */
5543 temp = force_reg (imode, temp);
5545 /* If the bitpos is within the "result mode" lowpart, the operation
5546 can be implemented with a single bitwise AND. Otherwise, we need
5547 a right shift and an AND. */
5549 if (bitpos < GET_MODE_BITSIZE (rmode))
5551 if (bitpos < HOST_BITS_PER_WIDE_INT)
5553 hi = 0;
5554 lo = (HOST_WIDE_INT) 1 << bitpos;
5556 else
5558 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5559 lo = 0;
5562 if (imode != rmode)
5563 temp = gen_lowpart (rmode, temp);
5564 temp = expand_binop (rmode, and_optab, temp,
5565 immed_double_const (lo, hi, rmode),
5566 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5568 else
5570 /* Perform a logical right shift to place the signbit in the least
5571 significant bit, then truncate the result to the desired mode
5572 and mask just this bit. */
5573 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5574 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5575 temp = gen_lowpart (rmode, temp);
5576 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5577 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5580 return temp;
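/* Illustrative sketch, not part of the GCC source: for a typical IEEE
   single-precision target, the expansion above boils down to testing
   the most significant bit of the 32-bit representation.  The union
   assumes 32-bit float and unsigned int, which portable code cannot
   rely on.  */
static inline int
signbit_float_example (float x)
{
  union { float f; unsigned int u; } v;
  v.f = x;
  return (v.u >> 31) & 1;   /* 1 for negative values, including -0.0 */
}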
5583 /* Expand fork or exec calls. TARGET is the desired target of the
5584 call. EXP is the call. FN is the declaration of the actual
5585 function being called. IGNORE is nonzero if the
5586 value is to be ignored. */
5588 static rtx
5589 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5591 tree id, decl;
5592 tree call;
5594 /* If we are not profiling, just call the function. */
5595 if (!profile_arc_flag)
5596 return NULL_RTX;
5598 /* Otherwise call the wrapper. To the rest of the compiler this should
5599 be equivalent, so the generated code does not diverge, while the wrapper
5600 can run whatever extra code is needed to keep the profiling data sane. */
5602 switch (DECL_FUNCTION_CODE (fn))
5604 case BUILT_IN_FORK:
5605 id = get_identifier ("__gcov_fork");
5606 break;
5608 case BUILT_IN_EXECL:
5609 id = get_identifier ("__gcov_execl");
5610 break;
5612 case BUILT_IN_EXECV:
5613 id = get_identifier ("__gcov_execv");
5614 break;
5616 case BUILT_IN_EXECLP:
5617 id = get_identifier ("__gcov_execlp");
5618 break;
5620 case BUILT_IN_EXECLE:
5621 id = get_identifier ("__gcov_execle");
5622 break;
5624 case BUILT_IN_EXECVP:
5625 id = get_identifier ("__gcov_execvp");
5626 break;
5628 case BUILT_IN_EXECVE:
5629 id = get_identifier ("__gcov_execve");
5630 break;
5632 default:
5633 gcc_unreachable ();
5636 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5637 DECL_EXTERNAL (decl) = 1;
5638 TREE_PUBLIC (decl) = 1;
5639 DECL_ARTIFICIAL (decl) = 1;
5640 TREE_NOTHROW (decl) = 1;
5641 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5642 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5643 call = rewrite_call_expr (exp, 0, decl, 0);
5644 return expand_call (call, target, ignore);
5649 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5650 the pointer in these functions is void*, the tree optimizers may remove
5651 casts. The mode computed in expand_builtin isn't reliable either, due
5652 to __sync_bool_compare_and_swap.
5654 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5655 group of builtins. This gives us log2 of the mode size. */
5657 static inline enum machine_mode
5658 get_builtin_sync_mode (int fcode_diff)
5660 /* The size is not negotiable, so ask not to get BLKmode in return
5661 if the target indicates that a smaller size would be better. */
5662 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
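/* Illustrative sketch, not part of the GCC source: with the usual
   8-bit unit this maps the FOO_1/_2/_4/_8/_16 builtins (FCODE_DIFF
   0, 1, 2, 3, 4) onto 8-, 16-, 32-, 64- and 128-bit integer modes.  */
static inline unsigned int
sync_mode_bits_example (int fcode_diff)
{
  return 8u << fcode_diff;   /* BITS_PER_UNIT << fcode_diff above */
}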
5665 /* Expand the memory expression LOC and return the appropriate memory operand
5666 for the builtin_sync operations. */
5668 static rtx
5669 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5671 rtx addr, mem;
5673 addr = expand_expr (loc, NULL, Pmode, EXPAND_SUM);
5675 /* Note that we explicitly do not want any alias information for this
5676 memory, so that we kill all other live memories. Otherwise we don't
5677 satisfy the full barrier semantics of the intrinsic. */
5678 mem = validize_mem (gen_rtx_MEM (mode, addr));
5680 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5681 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5682 MEM_VOLATILE_P (mem) = 1;
5684 return mem;
5687 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5688 EXP is the CALL_EXPR. CODE is the rtx code
5689 that corresponds to the arithmetic or logical operation from the name;
5690 an exception here is that NOT actually means NAND. TARGET is an optional
5691 place for us to store the results; AFTER is true if this is the
5692 fetch_and_xxx form. IGNORE is true if we don't actually care about
5693 the result of the operation at all. */
5695 static rtx
5696 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5697 enum rtx_code code, bool after,
5698 rtx target, bool ignore)
5700 rtx val, mem;
5702 /* Expand the operands. */
5703 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5705 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL, mode, EXPAND_NORMAL);
5706 /* If VAL is promoted to a wider mode, convert it back to MODE. */
5707 val = convert_to_mode (mode, val, 1);
5709 if (ignore)
5710 return expand_sync_operation (mem, val, code);
5711 else
5712 return expand_sync_fetch_operation (mem, val, code, after, target);
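/* Illustrative sketch, not part of the GCC source: ignoring atomicity,
   the only difference between __sync_fetch_and_OP and __sync_OP_and_fetch
   is which value is returned; AFTER above selects the latter.  PLUS is
   shown here, the other operation codes handled above are analogous.  */
static int
sync_add_semantics_example (int *mem, int val, int after)
{
  int old_val = *mem;
  int new_val = old_val + val;
  *mem = new_val;
  return after ? new_val : old_val;
}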
5715 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5716 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5717 true if this is the boolean form. TARGET is a place for us to store the
5718 results; this is NOT optional if IS_BOOL is true. */
5720 static rtx
5721 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5722 bool is_bool, rtx target)
5724 rtx old_val, new_val, mem;
5726 /* Expand the operands. */
5727 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5730 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL, mode, EXPAND_NORMAL);
5731 /* If OLD_VAL is promoted to a wider mode, convert it back to MODE. */
5732 old_val = convert_to_mode (mode, old_val, 1);
5734 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL, mode, EXPAND_NORMAL);
5735 /* If NEW_VAL is promoted to a wider mode, convert it back to MODE. */
5736 new_val = convert_to_mode (mode, new_val, 1);
5738 if (is_bool)
5739 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5740 else
5741 return expand_val_compare_and_swap (mem, old_val, new_val, target);
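/* Illustrative sketch, not part of the GCC source: again ignoring
   atomicity, the two compare-and-swap flavours differ only in what they
   return, which is why the boolean form insists on a TARGET register.  */
static int
compare_and_swap_semantics_example (int *mem, int old_val, int new_val,
                                    int is_bool)
{
  int prior = *mem;
  if (prior == old_val)
    *mem = new_val;
  return is_bool ? (prior == old_val) : prior;
}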
5744 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5745 general form is actually an atomic exchange, and some targets only
5746 support a reduced form with the second argument being a constant 1.
5747 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5748 the results. */
5750 static rtx
5751 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5752 rtx target)
5754 rtx val, mem;
5756 /* Expand the operands. */
5757 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5758 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL, mode, EXPAND_NORMAL);
5759 /* If VAL is promoted to a wider mode, convert it back to MODE. */
5760 val = convert_to_mode (mode, val, 1);
5762 return expand_sync_lock_test_and_set (mem, val, target);
5765 /* Expand the __sync_synchronize intrinsic. */
5767 static void
5768 expand_builtin_synchronize (void)
5770 tree x;
5772 #ifdef HAVE_memory_barrier
5773 if (HAVE_memory_barrier)
5775 emit_insn (gen_memory_barrier ());
5776 return;
5778 #endif
5780 /* If no explicit memory barrier instruction is available, create an
5781 empty asm stmt with a memory clobber. */
5782 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
5783 tree_cons (NULL, build_string (6, "memory"), NULL));
5784 ASM_VOLATILE_P (x) = 1;
5785 expand_asm_expr (x);
5788 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5790 static void
5791 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5793 enum insn_code icode;
5794 rtx mem, insn;
5795 rtx val = const0_rtx;
5797 /* Expand the operands. */
5798 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5800 /* If there is an explicit operation in the md file, use it. */
5801 icode = sync_lock_release[mode];
5802 if (icode != CODE_FOR_nothing)
5804 if (!insn_data[icode].operand[1].predicate (val, mode))
5805 val = force_reg (mode, val);
5807 insn = GEN_FCN (icode) (mem, val);
5808 if (insn)
5810 emit_insn (insn);
5811 return;
5815 /* Otherwise we can implement this operation by emitting a barrier
5816 followed by a store of zero. */
5817 expand_builtin_synchronize ();
5818 emit_move_insn (mem, val);
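/* Illustrative usage sketch, not part of the GCC source: the reduced
   test-and-set form (second argument a constant 1) together with
   __sync_lock_release is exactly what a minimal spinlock needs.  */
static void
spinlock_example (volatile int *lock)
{
  while (__sync_lock_test_and_set (lock, 1))
    ;                              /* spin until the old value was 0 */
  /* ... critical section ... */
  __sync_lock_release (lock);      /* release barrier, then store 0 */
}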
5821 /* Expand an expression EXP that calls a built-in function,
5822 with result going to TARGET if that's convenient
5823 (and in mode MODE if that's convenient).
5824 SUBTARGET may be used as the target for computing one of EXP's operands.
5825 IGNORE is nonzero if the value is to be ignored. */
5827 rtx
5828 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5829 int ignore)
5831 tree fndecl = get_callee_fndecl (exp);
5832 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5833 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5835 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5836 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5838 /* When not optimizing, generate calls to library functions for a certain
5839 set of builtins. */
5840 if (!optimize
5841 && !called_as_built_in (fndecl)
5842 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5843 && fcode != BUILT_IN_ALLOCA)
5844 return expand_call (exp, target, ignore);
5846 /* The built-in function expanders test for target == const0_rtx
5847 to determine whether the function's result will be ignored. */
5848 if (ignore)
5849 target = const0_rtx;
5851 /* If the result of a pure or const built-in function is ignored, and
5852 none of its arguments are volatile, we can avoid expanding the
5853 built-in call and just evaluate the arguments for side-effects. */
5854 if (target == const0_rtx
5855 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
5857 bool volatilep = false;
5858 tree arg;
5859 call_expr_arg_iterator iter;
5861 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5862 if (TREE_THIS_VOLATILE (arg))
5864 volatilep = true;
5865 break;
5868 if (! volatilep)
5870 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5871 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5872 return const0_rtx;
5876 switch (fcode)
5878 CASE_FLT_FN (BUILT_IN_FABS):
5879 target = expand_builtin_fabs (exp, target, subtarget);
5880 if (target)
5881 return target;
5882 break;
5884 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5885 target = expand_builtin_copysign (exp, target, subtarget);
5886 if (target)
5887 return target;
5888 break;
5890 /* Just do a normal library call if we were unable to fold
5891 the values. */
5892 CASE_FLT_FN (BUILT_IN_CABS):
5893 break;
5895 CASE_FLT_FN (BUILT_IN_EXP):
5896 CASE_FLT_FN (BUILT_IN_EXP10):
5897 CASE_FLT_FN (BUILT_IN_POW10):
5898 CASE_FLT_FN (BUILT_IN_EXP2):
5899 CASE_FLT_FN (BUILT_IN_EXPM1):
5900 CASE_FLT_FN (BUILT_IN_LOGB):
5901 CASE_FLT_FN (BUILT_IN_LOG):
5902 CASE_FLT_FN (BUILT_IN_LOG10):
5903 CASE_FLT_FN (BUILT_IN_LOG2):
5904 CASE_FLT_FN (BUILT_IN_LOG1P):
5905 CASE_FLT_FN (BUILT_IN_TAN):
5906 CASE_FLT_FN (BUILT_IN_ASIN):
5907 CASE_FLT_FN (BUILT_IN_ACOS):
5908 CASE_FLT_FN (BUILT_IN_ATAN):
5909 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5910 because of possible accuracy problems. */
5911 if (! flag_unsafe_math_optimizations)
5912 break;
5913 CASE_FLT_FN (BUILT_IN_SQRT):
5914 CASE_FLT_FN (BUILT_IN_FLOOR):
5915 CASE_FLT_FN (BUILT_IN_CEIL):
5916 CASE_FLT_FN (BUILT_IN_TRUNC):
5917 CASE_FLT_FN (BUILT_IN_ROUND):
5918 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5919 CASE_FLT_FN (BUILT_IN_RINT):
5920 target = expand_builtin_mathfn (exp, target, subtarget);
5921 if (target)
5922 return target;
5923 break;
5925 CASE_FLT_FN (BUILT_IN_ILOGB):
5926 if (! flag_unsafe_math_optimizations)
5927 break;
5928 CASE_FLT_FN (BUILT_IN_ISINF):
5929 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5930 if (target)
5931 return target;
5932 break;
5934 CASE_FLT_FN (BUILT_IN_LCEIL):
5935 CASE_FLT_FN (BUILT_IN_LLCEIL):
5936 CASE_FLT_FN (BUILT_IN_LFLOOR):
5937 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5938 target = expand_builtin_int_roundingfn (exp, target, subtarget);
5939 if (target)
5940 return target;
5941 break;
5943 CASE_FLT_FN (BUILT_IN_LRINT):
5944 CASE_FLT_FN (BUILT_IN_LLRINT):
5945 CASE_FLT_FN (BUILT_IN_LROUND):
5946 CASE_FLT_FN (BUILT_IN_LLROUND):
5947 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
5948 if (target)
5949 return target;
5950 break;
5952 CASE_FLT_FN (BUILT_IN_POW):
5953 target = expand_builtin_pow (exp, target, subtarget);
5954 if (target)
5955 return target;
5956 break;
5958 CASE_FLT_FN (BUILT_IN_POWI):
5959 target = expand_builtin_powi (exp, target, subtarget);
5960 if (target)
5961 return target;
5962 break;
5964 CASE_FLT_FN (BUILT_IN_ATAN2):
5965 CASE_FLT_FN (BUILT_IN_LDEXP):
5966 if (! flag_unsafe_math_optimizations)
5967 break;
5969 CASE_FLT_FN (BUILT_IN_FMOD):
5970 CASE_FLT_FN (BUILT_IN_REMAINDER):
5971 CASE_FLT_FN (BUILT_IN_DREM):
5972 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5973 if (target)
5974 return target;
5975 break;
5977 CASE_FLT_FN (BUILT_IN_CEXPI):
5978 target = expand_builtin_cexpi (exp, target, subtarget);
5979 gcc_assert (target);
5980 return target;
5982 CASE_FLT_FN (BUILT_IN_SIN):
5983 CASE_FLT_FN (BUILT_IN_COS):
5984 if (! flag_unsafe_math_optimizations)
5985 break;
5986 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5987 if (target)
5988 return target;
5989 break;
5991 CASE_FLT_FN (BUILT_IN_SINCOS):
5992 if (! flag_unsafe_math_optimizations)
5993 break;
5994 target = expand_builtin_sincos (exp);
5995 if (target)
5996 return target;
5997 break;
5999 case BUILT_IN_APPLY_ARGS:
6000 return expand_builtin_apply_args ();
6002 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6003 FUNCTION with a copy of the parameters described by
6004 ARGUMENTS, and ARGSIZE. It returns a block of memory
6005 allocated on the stack into which is stored all the registers
6006 that might possibly be used for returning the result of a
6007 function. ARGUMENTS is the value returned by
6008 __builtin_apply_args. ARGSIZE is the number of bytes of
6009 arguments that must be copied. ??? How should this value be
6010 computed? We'll also need a safe worst case value for varargs
6011 functions. */
6012 case BUILT_IN_APPLY:
6013 if (!validate_arglist (exp, POINTER_TYPE,
6014 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6015 && !validate_arglist (exp, REFERENCE_TYPE,
6016 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6017 return const0_rtx;
6018 else
6020 rtx ops[3];
6022 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6023 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6024 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6026 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6029 /* __builtin_return (RESULT) causes the function to return the
6030 value described by RESULT. RESULT is address of the block of
6031 memory returned by __builtin_apply. */
6032 case BUILT_IN_RETURN:
6033 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6034 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6035 return const0_rtx;
6037 case BUILT_IN_SAVEREGS:
6038 return expand_builtin_saveregs ();
6040 case BUILT_IN_ARGS_INFO:
6041 return expand_builtin_args_info (exp);
6043 /* Return the address of the first anonymous stack arg. */
6044 case BUILT_IN_NEXT_ARG:
6045 if (fold_builtin_next_arg (exp, false))
6046 return const0_rtx;
6047 return expand_builtin_next_arg ();
6049 case BUILT_IN_CLASSIFY_TYPE:
6050 return expand_builtin_classify_type (exp);
6052 case BUILT_IN_CONSTANT_P:
6053 return const0_rtx;
6055 case BUILT_IN_FRAME_ADDRESS:
6056 case BUILT_IN_RETURN_ADDRESS:
6057 return expand_builtin_frame_address (fndecl, exp);
6059 /* Returns the address of the area where the structure is returned.
6060 0 otherwise. */
6061 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6062 if (call_expr_nargs (exp) != 0
6063 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6064 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6065 return const0_rtx;
6066 else
6067 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6069 case BUILT_IN_ALLOCA:
6070 target = expand_builtin_alloca (exp, target);
6071 if (target)
6072 return target;
6073 break;
6075 case BUILT_IN_STACK_SAVE:
6076 return expand_stack_save ();
6078 case BUILT_IN_STACK_RESTORE:
6079 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6080 return const0_rtx;
6082 case BUILT_IN_BSWAP32:
6083 case BUILT_IN_BSWAP64:
6084 target = expand_builtin_bswap (exp, target, subtarget);
6086 if (target)
6087 return target;
6088 break;
6090 CASE_INT_FN (BUILT_IN_FFS):
6091 case BUILT_IN_FFSIMAX:
6092 target = expand_builtin_unop (target_mode, exp, target,
6093 subtarget, ffs_optab);
6094 if (target)
6095 return target;
6096 break;
6098 CASE_INT_FN (BUILT_IN_CLZ):
6099 case BUILT_IN_CLZIMAX:
6100 target = expand_builtin_unop (target_mode, exp, target,
6101 subtarget, clz_optab);
6102 if (target)
6103 return target;
6104 break;
6106 CASE_INT_FN (BUILT_IN_CTZ):
6107 case BUILT_IN_CTZIMAX:
6108 target = expand_builtin_unop (target_mode, exp, target,
6109 subtarget, ctz_optab);
6110 if (target)
6111 return target;
6112 break;
6114 CASE_INT_FN (BUILT_IN_POPCOUNT):
6115 case BUILT_IN_POPCOUNTIMAX:
6116 target = expand_builtin_unop (target_mode, exp, target,
6117 subtarget, popcount_optab);
6118 if (target)
6119 return target;
6120 break;
6122 CASE_INT_FN (BUILT_IN_PARITY):
6123 case BUILT_IN_PARITYIMAX:
6124 target = expand_builtin_unop (target_mode, exp, target,
6125 subtarget, parity_optab);
6126 if (target)
6127 return target;
6128 break;
6130 case BUILT_IN_STRLEN:
6131 target = expand_builtin_strlen (exp, target, target_mode);
6132 if (target)
6133 return target;
6134 break;
6136 case BUILT_IN_STRCPY:
6137 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6138 if (target)
6139 return target;
6140 break;
6142 case BUILT_IN_STRNCPY:
6143 target = expand_builtin_strncpy (exp, target, mode);
6144 if (target)
6145 return target;
6146 break;
6148 case BUILT_IN_STPCPY:
6149 target = expand_builtin_stpcpy (exp, target, mode);
6150 if (target)
6151 return target;
6152 break;
6154 case BUILT_IN_STRCAT:
6155 target = expand_builtin_strcat (fndecl, exp, target, mode);
6156 if (target)
6157 return target;
6158 break;
6160 case BUILT_IN_STRNCAT:
6161 target = expand_builtin_strncat (exp, target, mode);
6162 if (target)
6163 return target;
6164 break;
6166 case BUILT_IN_STRSPN:
6167 target = expand_builtin_strspn (exp, target, mode);
6168 if (target)
6169 return target;
6170 break;
6172 case BUILT_IN_STRCSPN:
6173 target = expand_builtin_strcspn (exp, target, mode);
6174 if (target)
6175 return target;
6176 break;
6178 case BUILT_IN_STRSTR:
6179 target = expand_builtin_strstr (exp, target, mode);
6180 if (target)
6181 return target;
6182 break;
6184 case BUILT_IN_STRPBRK:
6185 target = expand_builtin_strpbrk (exp, target, mode);
6186 if (target)
6187 return target;
6188 break;
6190 case BUILT_IN_INDEX:
6191 case BUILT_IN_STRCHR:
6192 target = expand_builtin_strchr (exp, target, mode);
6193 if (target)
6194 return target;
6195 break;
6197 case BUILT_IN_RINDEX:
6198 case BUILT_IN_STRRCHR:
6199 target = expand_builtin_strrchr (exp, target, mode);
6200 if (target)
6201 return target;
6202 break;
6204 case BUILT_IN_MEMCPY:
6205 target = expand_builtin_memcpy (exp, target, mode);
6206 if (target)
6207 return target;
6208 break;
6210 case BUILT_IN_MEMPCPY:
6211 target = expand_builtin_mempcpy (exp, target, mode);
6212 if (target)
6213 return target;
6214 break;
6216 case BUILT_IN_MEMMOVE:
6217 target = expand_builtin_memmove (exp, target, mode, ignore);
6218 if (target)
6219 return target;
6220 break;
6222 case BUILT_IN_BCOPY:
6223 target = expand_builtin_bcopy (exp, ignore);
6224 if (target)
6225 return target;
6226 break;
6228 case BUILT_IN_MEMSET:
6229 target = expand_builtin_memset (exp, target, mode);
6230 if (target)
6231 return target;
6232 break;
6234 case BUILT_IN_BZERO:
6235 target = expand_builtin_bzero (exp);
6236 if (target)
6237 return target;
6238 break;
6240 case BUILT_IN_STRCMP:
6241 target = expand_builtin_strcmp (exp, target, mode);
6242 if (target)
6243 return target;
6244 break;
6246 case BUILT_IN_STRNCMP:
6247 target = expand_builtin_strncmp (exp, target, mode);
6248 if (target)
6249 return target;
6250 break;
6252 case BUILT_IN_BCMP:
6253 case BUILT_IN_MEMCMP:
6254 target = expand_builtin_memcmp (exp, target, mode);
6255 if (target)
6256 return target;
6257 break;
6259 case BUILT_IN_SETJMP:
6260 /* This should have been lowered to the builtins below. */
6261 gcc_unreachable ();
6263 case BUILT_IN_SETJMP_SETUP:
6264 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6265 and the receiver label. */
6266 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6268 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6269 VOIDmode, EXPAND_NORMAL);
6270 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6271 rtx label_r = label_rtx (label);
6273 /* This is copied from the handling of non-local gotos. */
6274 expand_builtin_setjmp_setup (buf_addr, label_r);
6275 nonlocal_goto_handler_labels
6276 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6277 nonlocal_goto_handler_labels);
6278 /* ??? Do not let expand_label treat us as such since we would
6279 not want to be both on the list of non-local labels and on
6280 the list of forced labels. */
6281 FORCED_LABEL (label) = 0;
6282 return const0_rtx;
6284 break;
6286 case BUILT_IN_SETJMP_DISPATCHER:
6287 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6288 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6290 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6291 rtx label_r = label_rtx (label);
6293 /* Remove the dispatcher label from the list of non-local labels
6294 since the receiver labels have been added to it above. */
6295 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6296 return const0_rtx;
6298 break;
6300 case BUILT_IN_SETJMP_RECEIVER:
6301 /* __builtin_setjmp_receiver is passed the receiver label. */
6302 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6304 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6305 rtx label_r = label_rtx (label);
6307 expand_builtin_setjmp_receiver (label_r);
6308 return const0_rtx;
6310 break;
6312 /* __builtin_longjmp is passed a pointer to an array of five words.
6313 It's similar to the C library longjmp function but works with
6314 __builtin_setjmp above. */
6315 case BUILT_IN_LONGJMP:
6316 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6318 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6319 VOIDmode, EXPAND_NORMAL);
6320 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6322 if (value != const1_rtx)
6324 error ("%<__builtin_longjmp%> second argument must be 1");
6325 return const0_rtx;
6328 expand_builtin_longjmp (buf_addr, value);
6329 return const0_rtx;
6331 break;
6333 case BUILT_IN_NONLOCAL_GOTO:
6334 target = expand_builtin_nonlocal_goto (exp);
6335 if (target)
6336 return target;
6337 break;
6339 /* This updates the setjmp buffer that is its argument with the value
6340 of the current stack pointer. */
6341 case BUILT_IN_UPDATE_SETJMP_BUF:
6342 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6344 rtx buf_addr
6345 = expand_normal (CALL_EXPR_ARG (exp, 0));
6347 expand_builtin_update_setjmp_buf (buf_addr);
6348 return const0_rtx;
6350 break;
6352 case BUILT_IN_TRAP:
6353 expand_builtin_trap ();
6354 return const0_rtx;
6356 case BUILT_IN_PRINTF:
6357 target = expand_builtin_printf (exp, target, mode, false);
6358 if (target)
6359 return target;
6360 break;
6362 case BUILT_IN_PRINTF_UNLOCKED:
6363 target = expand_builtin_printf (exp, target, mode, true);
6364 if (target)
6365 return target;
6366 break;
6368 case BUILT_IN_FPUTS:
6369 target = expand_builtin_fputs (exp, target, false);
6370 if (target)
6371 return target;
6372 break;
6373 case BUILT_IN_FPUTS_UNLOCKED:
6374 target = expand_builtin_fputs (exp, target, true);
6375 if (target)
6376 return target;
6377 break;
6379 case BUILT_IN_FPRINTF:
6380 target = expand_builtin_fprintf (exp, target, mode, false);
6381 if (target)
6382 return target;
6383 break;
6385 case BUILT_IN_FPRINTF_UNLOCKED:
6386 target = expand_builtin_fprintf (exp, target, mode, true);
6387 if (target)
6388 return target;
6389 break;
6391 case BUILT_IN_SPRINTF:
6392 target = expand_builtin_sprintf (exp, target, mode);
6393 if (target)
6394 return target;
6395 break;
6397 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6398 target = expand_builtin_signbit (exp, target);
6399 if (target)
6400 return target;
6401 break;
6403 /* Various hooks for the DWARF 2 __throw routine. */
6404 case BUILT_IN_UNWIND_INIT:
6405 expand_builtin_unwind_init ();
6406 return const0_rtx;
6407 case BUILT_IN_DWARF_CFA:
6408 return virtual_cfa_rtx;
6409 #ifdef DWARF2_UNWIND_INFO
6410 case BUILT_IN_DWARF_SP_COLUMN:
6411 return expand_builtin_dwarf_sp_column ();
6412 case BUILT_IN_INIT_DWARF_REG_SIZES:
6413 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6414 return const0_rtx;
6415 #endif
6416 case BUILT_IN_FROB_RETURN_ADDR:
6417 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6418 case BUILT_IN_EXTRACT_RETURN_ADDR:
6419 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6420 case BUILT_IN_EH_RETURN:
6421 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6422 CALL_EXPR_ARG (exp, 1));
6423 return const0_rtx;
6424 #ifdef EH_RETURN_DATA_REGNO
6425 case BUILT_IN_EH_RETURN_DATA_REGNO:
6426 return expand_builtin_eh_return_data_regno (exp);
6427 #endif
6428 case BUILT_IN_EXTEND_POINTER:
6429 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6431 case BUILT_IN_VA_START:
6432 case BUILT_IN_STDARG_START:
6433 return expand_builtin_va_start (exp);
6434 case BUILT_IN_VA_END:
6435 return expand_builtin_va_end (exp);
6436 case BUILT_IN_VA_COPY:
6437 return expand_builtin_va_copy (exp);
6438 case BUILT_IN_EXPECT:
6439 return expand_builtin_expect (exp, target);
6440 case BUILT_IN_PREFETCH:
6441 expand_builtin_prefetch (exp);
6442 return const0_rtx;
6444 case BUILT_IN_PROFILE_FUNC_ENTER:
6445 return expand_builtin_profile_func (false);
6446 case BUILT_IN_PROFILE_FUNC_EXIT:
6447 return expand_builtin_profile_func (true);
6449 case BUILT_IN_INIT_TRAMPOLINE:
6450 return expand_builtin_init_trampoline (exp);
6451 case BUILT_IN_ADJUST_TRAMPOLINE:
6452 return expand_builtin_adjust_trampoline (exp);
6454 case BUILT_IN_FORK:
6455 case BUILT_IN_EXECL:
6456 case BUILT_IN_EXECV:
6457 case BUILT_IN_EXECLP:
6458 case BUILT_IN_EXECLE:
6459 case BUILT_IN_EXECVP:
6460 case BUILT_IN_EXECVE:
6461 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6462 if (target)
6463 return target;
6464 break;
6466 case BUILT_IN_FETCH_AND_ADD_1:
6467 case BUILT_IN_FETCH_AND_ADD_2:
6468 case BUILT_IN_FETCH_AND_ADD_4:
6469 case BUILT_IN_FETCH_AND_ADD_8:
6470 case BUILT_IN_FETCH_AND_ADD_16:
6471 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6472 target = expand_builtin_sync_operation (mode, exp, PLUS,
6473 false, target, ignore);
6474 if (target)
6475 return target;
6476 break;
6478 case BUILT_IN_FETCH_AND_SUB_1:
6479 case BUILT_IN_FETCH_AND_SUB_2:
6480 case BUILT_IN_FETCH_AND_SUB_4:
6481 case BUILT_IN_FETCH_AND_SUB_8:
6482 case BUILT_IN_FETCH_AND_SUB_16:
6483 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6484 target = expand_builtin_sync_operation (mode, exp, MINUS,
6485 false, target, ignore);
6486 if (target)
6487 return target;
6488 break;
6490 case BUILT_IN_FETCH_AND_OR_1:
6491 case BUILT_IN_FETCH_AND_OR_2:
6492 case BUILT_IN_FETCH_AND_OR_4:
6493 case BUILT_IN_FETCH_AND_OR_8:
6494 case BUILT_IN_FETCH_AND_OR_16:
6495 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6496 target = expand_builtin_sync_operation (mode, exp, IOR,
6497 false, target, ignore);
6498 if (target)
6499 return target;
6500 break;
6502 case BUILT_IN_FETCH_AND_AND_1:
6503 case BUILT_IN_FETCH_AND_AND_2:
6504 case BUILT_IN_FETCH_AND_AND_4:
6505 case BUILT_IN_FETCH_AND_AND_8:
6506 case BUILT_IN_FETCH_AND_AND_16:
6507 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6508 target = expand_builtin_sync_operation (mode, exp, AND,
6509 false, target, ignore);
6510 if (target)
6511 return target;
6512 break;
6514 case BUILT_IN_FETCH_AND_XOR_1:
6515 case BUILT_IN_FETCH_AND_XOR_2:
6516 case BUILT_IN_FETCH_AND_XOR_4:
6517 case BUILT_IN_FETCH_AND_XOR_8:
6518 case BUILT_IN_FETCH_AND_XOR_16:
6519 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6520 target = expand_builtin_sync_operation (mode, exp, XOR,
6521 false, target, ignore);
6522 if (target)
6523 return target;
6524 break;
6526 case BUILT_IN_FETCH_AND_NAND_1:
6527 case BUILT_IN_FETCH_AND_NAND_2:
6528 case BUILT_IN_FETCH_AND_NAND_4:
6529 case BUILT_IN_FETCH_AND_NAND_8:
6530 case BUILT_IN_FETCH_AND_NAND_16:
6531 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6532 target = expand_builtin_sync_operation (mode, exp, NOT,
6533 false, target, ignore);
6534 if (target)
6535 return target;
6536 break;
6538 case BUILT_IN_ADD_AND_FETCH_1:
6539 case BUILT_IN_ADD_AND_FETCH_2:
6540 case BUILT_IN_ADD_AND_FETCH_4:
6541 case BUILT_IN_ADD_AND_FETCH_8:
6542 case BUILT_IN_ADD_AND_FETCH_16:
6543 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6544 target = expand_builtin_sync_operation (mode, exp, PLUS,
6545 true, target, ignore);
6546 if (target)
6547 return target;
6548 break;
6550 case BUILT_IN_SUB_AND_FETCH_1:
6551 case BUILT_IN_SUB_AND_FETCH_2:
6552 case BUILT_IN_SUB_AND_FETCH_4:
6553 case BUILT_IN_SUB_AND_FETCH_8:
6554 case BUILT_IN_SUB_AND_FETCH_16:
6555 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6556 target = expand_builtin_sync_operation (mode, exp, MINUS,
6557 true, target, ignore);
6558 if (target)
6559 return target;
6560 break;
6562 case BUILT_IN_OR_AND_FETCH_1:
6563 case BUILT_IN_OR_AND_FETCH_2:
6564 case BUILT_IN_OR_AND_FETCH_4:
6565 case BUILT_IN_OR_AND_FETCH_8:
6566 case BUILT_IN_OR_AND_FETCH_16:
6567 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6568 target = expand_builtin_sync_operation (mode, exp, IOR,
6569 true, target, ignore);
6570 if (target)
6571 return target;
6572 break;
6574 case BUILT_IN_AND_AND_FETCH_1:
6575 case BUILT_IN_AND_AND_FETCH_2:
6576 case BUILT_IN_AND_AND_FETCH_4:
6577 case BUILT_IN_AND_AND_FETCH_8:
6578 case BUILT_IN_AND_AND_FETCH_16:
6579 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6580 target = expand_builtin_sync_operation (mode, exp, AND,
6581 true, target, ignore);
6582 if (target)
6583 return target;
6584 break;
6586 case BUILT_IN_XOR_AND_FETCH_1:
6587 case BUILT_IN_XOR_AND_FETCH_2:
6588 case BUILT_IN_XOR_AND_FETCH_4:
6589 case BUILT_IN_XOR_AND_FETCH_8:
6590 case BUILT_IN_XOR_AND_FETCH_16:
6591 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6592 target = expand_builtin_sync_operation (mode, exp, XOR,
6593 true, target, ignore);
6594 if (target)
6595 return target;
6596 break;
6598 case BUILT_IN_NAND_AND_FETCH_1:
6599 case BUILT_IN_NAND_AND_FETCH_2:
6600 case BUILT_IN_NAND_AND_FETCH_4:
6601 case BUILT_IN_NAND_AND_FETCH_8:
6602 case BUILT_IN_NAND_AND_FETCH_16:
6603 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6604 target = expand_builtin_sync_operation (mode, exp, NOT,
6605 true, target, ignore);
6606 if (target)
6607 return target;
6608 break;
6610 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6611 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6612 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6613 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6614 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6615 if (mode == VOIDmode)
6616 mode = TYPE_MODE (boolean_type_node);
6617 if (!target || !register_operand (target, mode))
6618 target = gen_reg_rtx (mode);
6620 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6621 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6622 if (target)
6623 return target;
6624 break;
6626 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6627 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6628 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6629 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6630 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6631 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6632 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6633 if (target)
6634 return target;
6635 break;
6637 case BUILT_IN_LOCK_TEST_AND_SET_1:
6638 case BUILT_IN_LOCK_TEST_AND_SET_2:
6639 case BUILT_IN_LOCK_TEST_AND_SET_4:
6640 case BUILT_IN_LOCK_TEST_AND_SET_8:
6641 case BUILT_IN_LOCK_TEST_AND_SET_16:
6642 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6643 target = expand_builtin_lock_test_and_set (mode, exp, target);
6644 if (target)
6645 return target;
6646 break;
6648 case BUILT_IN_LOCK_RELEASE_1:
6649 case BUILT_IN_LOCK_RELEASE_2:
6650 case BUILT_IN_LOCK_RELEASE_4:
6651 case BUILT_IN_LOCK_RELEASE_8:
6652 case BUILT_IN_LOCK_RELEASE_16:
6653 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6654 expand_builtin_lock_release (mode, exp);
6655 return const0_rtx;
6657 case BUILT_IN_SYNCHRONIZE:
6658 expand_builtin_synchronize ();
6659 return const0_rtx;
6661 case BUILT_IN_OBJECT_SIZE:
6662 return expand_builtin_object_size (exp);
6664 case BUILT_IN_MEMCPY_CHK:
6665 case BUILT_IN_MEMPCPY_CHK:
6666 case BUILT_IN_MEMMOVE_CHK:
6667 case BUILT_IN_MEMSET_CHK:
6668 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6669 if (target)
6670 return target;
6671 break;
6673 case BUILT_IN_STRCPY_CHK:
6674 case BUILT_IN_STPCPY_CHK:
6675 case BUILT_IN_STRNCPY_CHK:
6676 case BUILT_IN_STRCAT_CHK:
6677 case BUILT_IN_STRNCAT_CHK:
6678 case BUILT_IN_SNPRINTF_CHK:
6679 case BUILT_IN_VSNPRINTF_CHK:
6680 maybe_emit_chk_warning (exp, fcode);
6681 break;
6683 case BUILT_IN_SPRINTF_CHK:
6684 case BUILT_IN_VSPRINTF_CHK:
6685 maybe_emit_sprintf_chk_warning (exp, fcode);
6686 break;
6688 default: /* just do library call, if unknown builtin */
6689 break;
6692 /* The switch statement above can drop through to cause the function
6693 to be called normally. */
6694 return expand_call (exp, target, ignore);
6697 /* Determine whether a tree node represents a call to a built-in
6698 function. If the tree T is a call to a built-in function with
6699 the right number of arguments of the appropriate types, return
6700 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6701 Otherwise the return value is END_BUILTINS. */
6703 enum built_in_function
6704 builtin_mathfn_code (tree t)
6706 tree fndecl, arg, parmlist;
6707 tree argtype, parmtype;
6708 call_expr_arg_iterator iter;
6710 if (TREE_CODE (t) != CALL_EXPR
6711 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6712 return END_BUILTINS;
6714 fndecl = get_callee_fndecl (t);
6715 if (fndecl == NULL_TREE
6716 || TREE_CODE (fndecl) != FUNCTION_DECL
6717 || ! DECL_BUILT_IN (fndecl)
6718 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6719 return END_BUILTINS;
6721 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6722 init_call_expr_arg_iterator (t, &iter);
6723 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6725 /* If a function doesn't take a variable number of arguments,
6726 the last element in the list will have type `void'. */
6727 parmtype = TREE_VALUE (parmlist);
6728 if (VOID_TYPE_P (parmtype))
6730 if (more_call_expr_args_p (&iter))
6731 return END_BUILTINS;
6732 return DECL_FUNCTION_CODE (fndecl);
6735 if (! more_call_expr_args_p (&iter))
6736 return END_BUILTINS;
6738 arg = next_call_expr_arg (&iter);
6739 argtype = TREE_TYPE (arg);
6741 if (SCALAR_FLOAT_TYPE_P (parmtype))
6743 if (! SCALAR_FLOAT_TYPE_P (argtype))
6744 return END_BUILTINS;
6746 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6748 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6749 return END_BUILTINS;
6751 else if (POINTER_TYPE_P (parmtype))
6753 if (! POINTER_TYPE_P (argtype))
6754 return END_BUILTINS;
6756 else if (INTEGRAL_TYPE_P (parmtype))
6758 if (! INTEGRAL_TYPE_P (argtype))
6759 return END_BUILTINS;
6761 else
6762 return END_BUILTINS;
6765 /* Variable-length argument list. */
6766 return DECL_FUNCTION_CODE (fndecl);
6769 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6770 evaluate to a constant. */
6772 static tree
6773 fold_builtin_constant_p (tree arg)
6775 /* We return 1 for a numeric type that's known to be a constant
6776 value at compile-time or for an aggregate type that's a
6777 literal constant. */
6778 STRIP_NOPS (arg);
6780 /* If we know this is a constant, fold it to the integer constant one. */
6781 if (CONSTANT_CLASS_P (arg)
6782 || (TREE_CODE (arg) == CONSTRUCTOR
6783 && TREE_CONSTANT (arg)))
6784 return integer_one_node;
6785 if (TREE_CODE (arg) == ADDR_EXPR)
6787 tree op = TREE_OPERAND (arg, 0);
6788 if (TREE_CODE (op) == STRING_CST
6789 || (TREE_CODE (op) == ARRAY_REF
6790 && integer_zerop (TREE_OPERAND (op, 1))
6791 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6792 return integer_one_node;
6795 /* If this expression has side effects, show we don't know it to be a
6796 constant. Likewise if it's a pointer or aggregate type, since for
6797 those we only want literals, as they are only optimized when
6798 generating RTL, not later.
6799 And finally, if we are compiling an initializer, not code, we
6800 need to return a definite result now; there's not going to be any
6801 more optimization done. */
6802 if (TREE_SIDE_EFFECTS (arg)
6803 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6804 || POINTER_TYPE_P (TREE_TYPE (arg))
6805 || cfun == 0
6806 || folding_initializer)
6807 return integer_zero_node;
6809 return NULL_TREE;
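/* Illustrative usage sketch, not part of the GCC source: what the
   folding above does with typical arguments.  */
static int
constant_p_example (int x)
{
  int a = __builtin_constant_p (42);        /* folds to 1 */
  int b = __builtin_constant_p ("abc");     /* folds to 1 (string literal) */
  int c = __builtin_constant_p (x);         /* left unfolded here; becomes 0
                                               unless later optimization
                                               proves x constant */
  return a + b + c;
}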
6812 /* Fold a call to __builtin_expect with argument ARG, if we expect that a
6813 comparison against the argument will fold to a constant. In practice,
6814 this means a true constant or the address of a non-weak symbol. */
6816 static tree
6817 fold_builtin_expect (tree arg)
6819 tree inner;
6821 /* If the argument isn't invariant, then there's nothing we can do. */
6822 if (!TREE_INVARIANT (arg))
6823 return NULL_TREE;
6825 /* If we're looking at an address of a weak decl, then do not fold. */
6826 inner = arg;
6827 STRIP_NOPS (inner);
6828 if (TREE_CODE (inner) == ADDR_EXPR)
6832 inner = TREE_OPERAND (inner, 0);
6834 while (TREE_CODE (inner) == COMPONENT_REF
6835 || TREE_CODE (inner) == ARRAY_REF);
6836 if (DECL_P (inner) && DECL_WEAK (inner))
6837 return NULL_TREE;
6840 /* Otherwise, ARG already has the proper type for the return value. */
6841 return arg;
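/* Illustrative usage sketch, not part of the GCC source: the classic
   branch-annotation macros built on top of __builtin_expect.  */
#define LIKELY_EXAMPLE(x)   __builtin_expect (!!(x), 1)
#define UNLIKELY_EXAMPLE(x) __builtin_expect (!!(x), 0)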
6844 /* Fold a call to __builtin_classify_type with argument ARG. */
6846 static tree
6847 fold_builtin_classify_type (tree arg)
6849 if (arg == 0)
6850 return build_int_cst (NULL_TREE, no_type_class);
6852 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6855 /* Fold a call to __builtin_strlen with argument ARG. */
6857 static tree
6858 fold_builtin_strlen (tree arg)
6860 if (!validate_arg (arg, POINTER_TYPE))
6861 return NULL_TREE;
6862 else
6864 tree len = c_strlen (arg, 0);
6866 if (len)
6868 /* Convert from the internal "sizetype" type to "size_t". */
6869 if (size_type_node)
6870 len = fold_convert (size_type_node, len);
6871 return len;
6874 return NULL_TREE;
6878 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6880 static tree
6881 fold_builtin_inf (tree type, int warn)
6883 REAL_VALUE_TYPE real;
6885 /* __builtin_inff is intended to be usable to define INFINITY on all
6886 targets. If an infinity is not available, INFINITY expands "to a
6887 positive constant of type float that overflows at translation
6888 time", footnote "In this case, using INFINITY will violate the
6889 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6890 Thus we pedwarn to ensure this constraint violation is
6891 diagnosed. */
6892 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6893 pedwarn ("target format does not support infinity");
6895 real_inf (&real);
6896 return build_real (type, real);
6899 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6901 static tree
6902 fold_builtin_nan (tree arg, tree type, int quiet)
6904 REAL_VALUE_TYPE real;
6905 const char *str;
6907 if (!validate_arg (arg, POINTER_TYPE))
6908 return NULL_TREE;
6909 str = c_getstr (arg);
6910 if (!str)
6911 return NULL_TREE;
6913 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6914 return NULL_TREE;
6916 return build_real (type, real);
6919 /* Return true if the floating point expression T has an integer value.
6920 We also allow +Inf, -Inf and NaN to be considered integer values. */
6922 static bool
6923 integer_valued_real_p (tree t)
6925 switch (TREE_CODE (t))
6927 case FLOAT_EXPR:
6928 return true;
6930 case ABS_EXPR:
6931 case SAVE_EXPR:
6932 case NON_LVALUE_EXPR:
6933 return integer_valued_real_p (TREE_OPERAND (t, 0));
6935 case COMPOUND_EXPR:
6936 case MODIFY_EXPR:
6937 case BIND_EXPR:
6938 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
6940 case PLUS_EXPR:
6941 case MINUS_EXPR:
6942 case MULT_EXPR:
6943 case MIN_EXPR:
6944 case MAX_EXPR:
6945 return integer_valued_real_p (TREE_OPERAND (t, 0))
6946 && integer_valued_real_p (TREE_OPERAND (t, 1));
6948 case COND_EXPR:
6949 return integer_valued_real_p (TREE_OPERAND (t, 1))
6950 && integer_valued_real_p (TREE_OPERAND (t, 2));
6952 case REAL_CST:
6953 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6955 case NOP_EXPR:
6957 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6958 if (TREE_CODE (type) == INTEGER_TYPE)
6959 return true;
6960 if (TREE_CODE (type) == REAL_TYPE)
6961 return integer_valued_real_p (TREE_OPERAND (t, 0));
6962 break;
6965 case CALL_EXPR:
6966 switch (builtin_mathfn_code (t))
6968 CASE_FLT_FN (BUILT_IN_CEIL):
6969 CASE_FLT_FN (BUILT_IN_FLOOR):
6970 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6971 CASE_FLT_FN (BUILT_IN_RINT):
6972 CASE_FLT_FN (BUILT_IN_ROUND):
6973 CASE_FLT_FN (BUILT_IN_TRUNC):
6974 return true;
6976 CASE_FLT_FN (BUILT_IN_FMIN):
6977 CASE_FLT_FN (BUILT_IN_FMAX):
6978 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6979 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6981 default:
6982 break;
6984 break;
6986 default:
6987 break;
6989 return false;
6992 /* FNDECL is assumed to be a builtin where truncation can be propagated
6993 across (for instance floor((double)f) == (double)floorf (f)).
6994 Do the transformation for a call with argument ARG. */
6996 static tree
6997 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
6999 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7001 if (!validate_arg (arg, REAL_TYPE))
7002 return NULL_TREE;
7004 /* Integer rounding functions are idempotent. */
7005 if (fcode == builtin_mathfn_code (arg))
7006 return arg;
7008 /* If argument is already integer valued, and we don't need to worry
7009 about setting errno, there's no need to perform rounding. */
7010 if (! flag_errno_math && integer_valued_real_p (arg))
7011 return arg;
7013 if (optimize)
7015 tree arg0 = strip_float_extensions (arg);
7016 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7017 tree newtype = TREE_TYPE (arg0);
7018 tree decl;
7020 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7021 && (decl = mathfn_built_in (newtype, fcode)))
7022 return fold_convert (ftype,
7023 build_call_expr (decl, 1,
7024 fold_convert (newtype, arg0)));
7026 return NULL_TREE;
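/* Illustrative sketch, not part of the GCC source: the narrowing this
   routine performs, written out at the C source level.  */
static float
trunc_transparent_example (float f)
{
  /* floor ((double) f) has the same value as floorf (f) for every
     float f, so the widening round trip can be dropped and the call
     replaced by the single-precision builtin.  */
  return (float) __builtin_floor ((double) f);   /* -> __builtin_floorf (f) */
}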
7029 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7030 the argument, for instance lround((double)f) -> lroundf (f).
7031 Do the transformation for a call with argument ARG. */
7033 static tree
7034 fold_fixed_mathfn (tree fndecl, tree arg)
7036 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7038 if (!validate_arg (arg, REAL_TYPE))
7039 return NULL_TREE;
7041 /* If argument is already integer valued, and we don't need to worry
7042 about setting errno, there's no need to perform rounding. */
7043 if (! flag_errno_math && integer_valued_real_p (arg))
7044 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7046 if (optimize)
7048 tree ftype = TREE_TYPE (arg);
7049 tree arg0 = strip_float_extensions (arg);
7050 tree newtype = TREE_TYPE (arg0);
7051 tree decl;
7053 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7054 && (decl = mathfn_built_in (newtype, fcode)))
7055 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7058 /* Canonicalize llceil/llfloor/llround/llrint to lceil/lfloor/lround/lrint
7059 on LP64 targets where sizeof (long long) == sizeof (long). */
7060 if (TYPE_PRECISION (long_long_integer_type_node)
7061 == TYPE_PRECISION (long_integer_type_node))
7063 tree newfn = NULL_TREE;
7064 switch (fcode)
7066 CASE_FLT_FN (BUILT_IN_LLCEIL):
7067 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7068 break;
7070 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7071 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7072 break;
7074 CASE_FLT_FN (BUILT_IN_LLROUND):
7075 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7076 break;
7078 CASE_FLT_FN (BUILT_IN_LLRINT):
7079 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7080 break;
7082 default:
7083 break;
7086 if (newfn)
7088 tree newcall = build_call_expr (newfn, 1, arg);
7089 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7093 return NULL_TREE;
7096 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7097 return type. Return NULL_TREE if no simplification can be made. */
7099 static tree
7100 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7102 tree res;
7104 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7105 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7106 return NULL_TREE;
7108 /* Calculate the result when the argument is a constant. */
7109 if (TREE_CODE (arg) == COMPLEX_CST
7110 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7111 type, mpfr_hypot)))
7112 return res;
7114 if (TREE_CODE (arg) == COMPLEX_EXPR)
7116 tree real = TREE_OPERAND (arg, 0);
7117 tree imag = TREE_OPERAND (arg, 1);
7119 /* If either part is zero, cabs is fabs of the other. */
7120 if (real_zerop (real))
7121 return fold_build1 (ABS_EXPR, type, imag);
7122 if (real_zerop (imag))
7123 return fold_build1 (ABS_EXPR, type, real);
7125 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7126 if (flag_unsafe_math_optimizations
7127 && operand_equal_p (real, imag, OEP_PURE_SAME))
7129 const REAL_VALUE_TYPE sqrt2_trunc
7130 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
7131 STRIP_NOPS (real);
7132 return fold_build2 (MULT_EXPR, type,
7133 fold_build1 (ABS_EXPR, type, real),
7134 build_real (type, sqrt2_trunc));
7138 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7139 if (TREE_CODE (arg) == NEGATE_EXPR
7140 || TREE_CODE (arg) == CONJ_EXPR)
7141 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7143 /* Don't do this when optimizing for size. */
7144 if (flag_unsafe_math_optimizations
7145 && optimize && !optimize_size)
7147 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7149 if (sqrtfn != NULL_TREE)
7151 tree rpart, ipart, result;
7153 arg = builtin_save_expr (arg);
7155 rpart = fold_build1 (REALPART_EXPR, type, arg);
7156 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7158 rpart = builtin_save_expr (rpart);
7159 ipart = builtin_save_expr (ipart);
7161 result = fold_build2 (PLUS_EXPR, type,
7162 fold_build2 (MULT_EXPR, type,
7163 rpart, rpart),
7164 fold_build2 (MULT_EXPR, type,
7165 ipart, ipart));
7167 return build_call_expr (sqrtfn, 1, result);
7171 return NULL_TREE;
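/* Illustrative sketch, not part of the GCC source: with unsafe math
   optimizations enabled (and not optimizing for size), the fallback
   above expands cabs as the textbook formula below; unlike a careful
   cabs/hypot implementation this can overflow or underflow when the
   components are near the extremes of the range.  */
static double
cabs_expansion_example (double re, double im)
{
  return __builtin_sqrt (re * re + im * im);
}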
7174 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7175 Return NULL_TREE if no simplification can be made. */
7177 static tree
7178 fold_builtin_sqrt (tree arg, tree type)
7181 enum built_in_function fcode;
7182 tree res;
7184 if (!validate_arg (arg, REAL_TYPE))
7185 return NULL_TREE;
7187 /* Calculate the result when the argument is a constant. */
7188 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7189 return res;
7191 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7192 fcode = builtin_mathfn_code (arg);
7193 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7195 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7196 arg = fold_build2 (MULT_EXPR, type,
7197 CALL_EXPR_ARG (arg, 0),
7198 build_real (type, dconsthalf));
7199 return build_call_expr (expfn, 1, arg);
7202 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7203 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7205 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7207 if (powfn)
7209 tree arg0 = CALL_EXPR_ARG (arg, 0);
7210 tree tree_root;
7211 /* The inner root was either sqrt or cbrt. */
7212 REAL_VALUE_TYPE dconstroot =
7213 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;
7215 /* Adjust for the outer root. */
7216 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7217 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7218 tree_root = build_real (type, dconstroot);
7219 return build_call_expr (powfn, 2, arg0, tree_root);
7223 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7224 if (flag_unsafe_math_optimizations
7225 && (fcode == BUILT_IN_POW
7226 || fcode == BUILT_IN_POWF
7227 || fcode == BUILT_IN_POWL))
7229 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7230 tree arg0 = CALL_EXPR_ARG (arg, 0);
7231 tree arg1 = CALL_EXPR_ARG (arg, 1);
7232 tree narg1;
7233 if (!tree_expr_nonnegative_p (arg0))
7234 arg0 = build1 (ABS_EXPR, type, arg0);
7235 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7236 build_real (type, dconsthalf));
7237 return build_call_expr (powfn, 2, arg0, narg1);
7240 return NULL_TREE;
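/* Editorial note: illustrative sketch, not GCC source.  The sqrt folds above
   as source-level rewrites; x and y are hypothetical doubles and every line
   requires -funsafe-math-optimizations.  */
#if 0
#include <math.h>

double
sqrt_fold_sketch (double x, double y)
{
  double a = sqrt (exp (x));      /* folds to exp (x * 0.5)           */
  double b = sqrt (sqrt (x));     /* folds to pow (x, 0.25)           */
  double c = sqrt (cbrt (x));     /* folds to pow (x, 1.0 / 6.0)      */
  double d = sqrt (pow (x, y));   /* folds to pow (fabs (x), y * 0.5) */
  return a + b + c + d;
}
#endif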
7243 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7244 Return NULL_TREE if no simplification can be made. */
7246 static tree
7247 fold_builtin_cbrt (tree arg, tree type)
7249 const enum built_in_function fcode = builtin_mathfn_code (arg);
7250 tree res;
7252 if (!validate_arg (arg, REAL_TYPE))
7253 return NULL_TREE;
7255 /* Calculate the result when the argument is a constant. */
7256 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7257 return res;
7259 if (flag_unsafe_math_optimizations)
7261 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7262 if (BUILTIN_EXPONENT_P (fcode))
7264 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7265 const REAL_VALUE_TYPE third_trunc =
7266 real_value_truncate (TYPE_MODE (type), dconstthird);
7267 arg = fold_build2 (MULT_EXPR, type,
7268 CALL_EXPR_ARG (arg, 0),
7269 build_real (type, third_trunc));
7270 return build_call_expr (expfn, 1, arg);
7273 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7274 if (BUILTIN_SQRT_P (fcode))
7276 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7278 if (powfn)
7280 tree arg0 = CALL_EXPR_ARG (arg, 0);
7281 tree tree_root;
7282 REAL_VALUE_TYPE dconstroot = dconstthird;
7284 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7285 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7286 tree_root = build_real (type, dconstroot);
7287 return build_call_expr (powfn, 2, arg0, tree_root);
7291 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7292 if (BUILTIN_CBRT_P (fcode))
7294 tree arg0 = CALL_EXPR_ARG (arg, 0);
7295 if (tree_expr_nonnegative_p (arg0))
7297 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7299 if (powfn)
7301 tree tree_root;
7302 REAL_VALUE_TYPE dconstroot;
7304 real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
7305 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7306 tree_root = build_real (type, dconstroot);
7307 return build_call_expr (powfn, 2, arg0, tree_root);
7312 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7313 if (fcode == BUILT_IN_POW
7314 || fcode == BUILT_IN_POWF
7315 || fcode == BUILT_IN_POWL)
7317 tree arg00 = CALL_EXPR_ARG (arg, 0);
7318 tree arg01 = CALL_EXPR_ARG (arg, 1);
7319 if (tree_expr_nonnegative_p (arg00))
7321 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7322 const REAL_VALUE_TYPE dconstroot
7323 = real_value_truncate (TYPE_MODE (type), dconstthird);
7324 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7325 build_real (type, dconstroot));
7326 return build_call_expr (powfn, 2, arg00, narg01);
7330 return NULL_TREE;
7333 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7334 TYPE is the type of the return value. Return NULL_TREE if no
7335 simplification can be made. */
7337 static tree
7338 fold_builtin_cos (tree arg, tree type, tree fndecl)
7340 tree res, narg;
7342 if (!validate_arg (arg, REAL_TYPE))
7343 return NULL_TREE;
7345 /* Calculate the result when the argument is a constant. */
7346 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7347 return res;
7349 /* Optimize cos(-x) into cos (x). */
7350 if ((narg = fold_strip_sign_ops (arg)))
7351 return build_call_expr (fndecl, 1, narg);
7353 return NULL_TREE;
7356 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7357 Return NULL_TREE if no simplification can be made. */
7359 static tree
7360 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7362 if (validate_arg (arg, REAL_TYPE))
7364 tree res, narg;
7366 /* Calculate the result when the argument is a constant. */
7367 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7368 return res;
7370 /* Optimize cosh(-x) into cosh (x). */
7371 if ((narg = fold_strip_sign_ops (arg)))
7372 return build_call_expr (fndecl, 1, narg);
7375 return NULL_TREE;
7378 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7379 Return NULL_TREE if no simplification can be made. */
7381 static tree
7382 fold_builtin_tan (tree arg, tree type)
7384 enum built_in_function fcode;
7385 tree res;
7387 if (!validate_arg (arg, REAL_TYPE))
7388 return NULL_TREE;
7390 /* Calculate the result when the argument is a constant. */
7391 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7392 return res;
7394 /* Optimize tan(atan(x)) = x. */
7395 fcode = builtin_mathfn_code (arg);
7396 if (flag_unsafe_math_optimizations
7397 && (fcode == BUILT_IN_ATAN
7398 || fcode == BUILT_IN_ATANF
7399 || fcode == BUILT_IN_ATANL))
7400 return CALL_EXPR_ARG (arg, 0);
7402 return NULL_TREE;
7405 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7406 NULL_TREE if no simplification can be made. */
7408 static tree
7409 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7411 tree type;
7412 tree res, fn, call;
7414 if (!validate_arg (arg0, REAL_TYPE)
7415 || !validate_arg (arg1, POINTER_TYPE)
7416 || !validate_arg (arg2, POINTER_TYPE))
7417 return NULL_TREE;
7419 type = TREE_TYPE (arg0);
7421 /* Calculate the result when the argument is a constant. */
7422 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7423 return res;
7425 /* Canonicalize sincos to cexpi. */
7426 if (!TARGET_C99_FUNCTIONS)
7427 return NULL_TREE;
7428 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7429 if (!fn)
7430 return NULL_TREE;
7432 call = build_call_expr (fn, 1, arg0);
7433 call = builtin_save_expr (call);
7435 return build2 (COMPOUND_EXPR, type,
7436 build2 (MODIFY_EXPR, void_type_node,
7437 build_fold_indirect_ref (arg1),
7438 build1 (IMAGPART_EXPR, type, call)),
7439 build2 (MODIFY_EXPR, void_type_node,
7440 build_fold_indirect_ref (arg2),
7441 build1 (REALPART_EXPR, type, call)));
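/* Editorial note: illustrative sketch, not GCC source.  It spells out the
   canonicalization above; s and c are hypothetical output pointers, sincos
   itself is a GNU extension, and __builtin_cexpi is the GCC-internal
   builtin used as the canonical form.  */
#if 0
void
sincos_fold_sketch (double x, double *s, double *c)
{
  /* sincos (x, s, c) is rewritten, when the target has C99 runtime
     support, roughly into the following two stores.  */
  _Complex double t = __builtin_cexpi (x);   /* cos (x) + i*sin (x) */
  *s = __imag__ t;
  *c = __real__ t;
}
#endif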
7444 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7445 NULL_TREE if no simplification can be made. */
7447 static tree
7448 fold_builtin_cexp (tree arg0, tree type)
7450 tree rtype;
7451 tree realp, imagp, ifn;
7453 if (!validate_arg (arg0, COMPLEX_TYPE))
7454 return NULL_TREE;
7456 rtype = TREE_TYPE (TREE_TYPE (arg0));
7458 /* If we can figure out the real part of arg0 and it is constant zero,
7459 fold to cexpi. */
7460 if (!TARGET_C99_FUNCTIONS)
7461 return NULL_TREE;
7462 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7463 if (!ifn)
7464 return NULL_TREE;
7466 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7467 && real_zerop (realp))
7469 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7470 return build_call_expr (ifn, 1, narg);
7473 /* If we can easily decompose the real and imaginary parts, split cexp
7474 into exp (r) * cexpi (i). */
7475 if (flag_unsafe_math_optimizations
7476 && realp)
7478 tree rfn, rcall, icall;
7480 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7481 if (!rfn)
7482 return NULL_TREE;
7484 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7485 if (!imagp)
7486 return NULL_TREE;
7488 icall = build_call_expr (ifn, 1, imagp);
7489 icall = builtin_save_expr (icall);
7490 rcall = build_call_expr (rfn, 1, realp);
7491 rcall = builtin_save_expr (rcall);
7492 return build2 (COMPLEX_EXPR, type,
7493 build2 (MULT_EXPR, rtype,
7494 rcall,
7495 build1 (REALPART_EXPR, rtype, icall)),
7496 build2 (MULT_EXPR, rtype,
7497 rcall,
7498 build1 (IMAGPART_EXPR, rtype, icall)));
7501 return NULL_TREE;
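/* Editorial note: illustrative sketch, not GCC source.  The cexp folds
   above at the source level; x and y are hypothetical doubles, and the
   exp (x) * cexpi (y) split additionally needs -funsafe-math-optimizations
   and C99 runtime support.  */
#if 0
#include <complex.h>
#include <math.h>

double complex
cexp_fold_sketch (double x, double y)
{
  double complex a = cexp (0.0 + y * I);  /* folds to cexpi (y)               */
  double complex b = cexp (x + y * I);    /* folds to a COMPLEX_EXPR whose
                                             parts are exp (x) times the real
                                             and imaginary parts of cexpi (y) */
  return a + b;
}
#endif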
7504 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7505 Return NULL_TREE if no simplification can be made. */
7507 static tree
7508 fold_builtin_trunc (tree fndecl, tree arg)
7510 if (!validate_arg (arg, REAL_TYPE))
7511 return NULL_TREE;
7513 /* Optimize trunc of constant value. */
7514 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7516 REAL_VALUE_TYPE r, x;
7517 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7519 x = TREE_REAL_CST (arg);
7520 real_trunc (&r, TYPE_MODE (type), &x);
7521 return build_real (type, r);
7524 return fold_trunc_transparent_mathfn (fndecl, arg);
7527 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7528 Return NULL_TREE if no simplification can be made. */
7530 static tree
7531 fold_builtin_floor (tree fndecl, tree arg)
7533 if (!validate_arg (arg, REAL_TYPE))
7534 return NULL_TREE;
7536 /* Optimize floor of constant value. */
7537 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7539 REAL_VALUE_TYPE x;
7541 x = TREE_REAL_CST (arg);
7542 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7544 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7545 REAL_VALUE_TYPE r;
7547 real_floor (&r, TYPE_MODE (type), &x);
7548 return build_real (type, r);
7552 /* Fold floor (x) where x is nonnegative to trunc (x). */
7553 if (tree_expr_nonnegative_p (arg))
7555 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7556 if (truncfn)
7557 return build_call_expr (truncfn, 1, arg);
7560 return fold_trunc_transparent_mathfn (fndecl, arg);
7563 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7564 Return NULL_TREE if no simplification can be made. */
7566 static tree
7567 fold_builtin_ceil (tree fndecl, tree arg)
7569 if (!validate_arg (arg, REAL_TYPE))
7570 return NULL_TREE;
7572 /* Optimize ceil of constant value. */
7573 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7575 REAL_VALUE_TYPE x;
7577 x = TREE_REAL_CST (arg);
7578 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7580 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7581 REAL_VALUE_TYPE r;
7583 real_ceil (&r, TYPE_MODE (type), &x);
7584 return build_real (type, r);
7588 return fold_trunc_transparent_mathfn (fndecl, arg);
7591 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7592 Return NULL_TREE if no simplification can be made. */
7594 static tree
7595 fold_builtin_round (tree fndecl, tree arg)
7597 if (!validate_arg (arg, REAL_TYPE))
7598 return NULL_TREE;
7600 /* Optimize round of constant value. */
7601 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7603 REAL_VALUE_TYPE x;
7605 x = TREE_REAL_CST (arg);
7606 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7608 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7609 REAL_VALUE_TYPE r;
7611 real_round (&r, TYPE_MODE (type), &x);
7612 return build_real (type, r);
7616 return fold_trunc_transparent_mathfn (fndecl, arg);
7619 /* Fold function call to builtin lround, lroundf or lroundl (or the
7620 corresponding long long versions) and other rounding functions. ARG
7621 is the argument to the call. Return NULL_TREE if no simplification
7622 can be made. */
7624 static tree
7625 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7627 if (!validate_arg (arg, REAL_TYPE))
7628 return NULL_TREE;
7630 /* Optimize lround of constant value. */
7631 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7633 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7635 if (! REAL_VALUE_ISNAN (x) && ! REAL_VALUE_ISINF (x))
7637 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7638 tree ftype = TREE_TYPE (arg);
7639 unsigned HOST_WIDE_INT lo2;
7640 HOST_WIDE_INT hi, lo;
7641 REAL_VALUE_TYPE r;
7643 switch (DECL_FUNCTION_CODE (fndecl))
7645 CASE_FLT_FN (BUILT_IN_LFLOOR):
7646 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7647 real_floor (&r, TYPE_MODE (ftype), &x);
7648 break;
7650 CASE_FLT_FN (BUILT_IN_LCEIL):
7651 CASE_FLT_FN (BUILT_IN_LLCEIL):
7652 real_ceil (&r, TYPE_MODE (ftype), &x);
7653 break;
7655 CASE_FLT_FN (BUILT_IN_LROUND):
7656 CASE_FLT_FN (BUILT_IN_LLROUND):
7657 real_round (&r, TYPE_MODE (ftype), &x);
7658 break;
7660 default:
7661 gcc_unreachable ();
7664 REAL_VALUE_TO_INT (&lo, &hi, r);
7665 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7666 return build_int_cst_wide (itype, lo2, hi);
7670 switch (DECL_FUNCTION_CODE (fndecl))
7672 CASE_FLT_FN (BUILT_IN_LFLOOR):
7673 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7674 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7675 if (tree_expr_nonnegative_p (arg))
7676 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
7677 arg);
7678 break;
7679 default:;
7682 return fold_fixed_mathfn (fndecl, arg);
7685 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7686 and their long and long long variants (e.g. ffsl and ffsll). ARG is
7687 the argument to the call. Return NULL_TREE if no simplification can
7688 be made. */
7690 static tree
7691 fold_builtin_bitop (tree fndecl, tree arg)
7693 if (!validate_arg (arg, INTEGER_TYPE))
7694 return NULL_TREE;
7696 /* Optimize for constant argument. */
7697 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7699 HOST_WIDE_INT hi, width, result;
7700 unsigned HOST_WIDE_INT lo;
7701 tree type;
7703 type = TREE_TYPE (arg);
7704 width = TYPE_PRECISION (type);
7705 lo = TREE_INT_CST_LOW (arg);
7707 /* Clear all the bits that are beyond the type's precision. */
7708 if (width > HOST_BITS_PER_WIDE_INT)
7710 hi = TREE_INT_CST_HIGH (arg);
7711 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7712 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7714 else
7716 hi = 0;
7717 if (width < HOST_BITS_PER_WIDE_INT)
7718 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7721 switch (DECL_FUNCTION_CODE (fndecl))
7723 CASE_INT_FN (BUILT_IN_FFS):
7724 if (lo != 0)
7725 result = exact_log2 (lo & -lo) + 1;
7726 else if (hi != 0)
7727 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7728 else
7729 result = 0;
7730 break;
7732 CASE_INT_FN (BUILT_IN_CLZ):
7733 if (hi != 0)
7734 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7735 else if (lo != 0)
7736 result = width - floor_log2 (lo) - 1;
7737 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7738 result = width;
7739 break;
7741 CASE_INT_FN (BUILT_IN_CTZ):
7742 if (lo != 0)
7743 result = exact_log2 (lo & -lo);
7744 else if (hi != 0)
7745 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7746 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7747 result = width;
7748 break;
7750 CASE_INT_FN (BUILT_IN_POPCOUNT):
7751 result = 0;
7752 while (lo)
7753 result++, lo &= lo - 1;
7754 while (hi)
7755 result++, hi &= hi - 1;
7756 break;
7758 CASE_INT_FN (BUILT_IN_PARITY):
7759 result = 0;
7760 while (lo)
7761 result++, lo &= lo - 1;
7762 while (hi)
7763 result++, hi &= hi - 1;
7764 result &= 1;
7765 break;
7767 default:
7768 gcc_unreachable ();
7771 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7774 return NULL_TREE;
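/* Editorial note: illustrative sketch, not GCC source.  The popcount and
   parity cases above use the classic "clear the lowest set bit" loop on the
   low and high words of the constant; here is the same trick on a plain
   unsigned long (hypothetical helper, for reference only).  */
#if 0
static int
popcount_sketch (unsigned long x)
{
  int n = 0;
  while (x)
    {
      n++;
      x &= x - 1;    /* clears the lowest set bit on each iteration */
    }
  return n;          /* the parity fold simply returns n & 1 */
}
#endif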
7777 /* Fold a call to __builtin_bswap32 or __builtin_bswap64 with argument ARG.
7778 Return NULL_TREE if no simplification can be made. */
7779 static tree
7780 fold_builtin_bswap (tree fndecl, tree arg)
7782 if (! validate_arg (arg, INTEGER_TYPE))
7783 return NULL_TREE;
7785 /* Optimize constant value. */
7786 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7788 HOST_WIDE_INT hi, width, r_hi = 0;
7789 unsigned HOST_WIDE_INT lo, r_lo = 0;
7790 tree type;
7792 type = TREE_TYPE (arg);
7793 width = TYPE_PRECISION (type);
7794 lo = TREE_INT_CST_LOW (arg);
7795 hi = TREE_INT_CST_HIGH (arg);
7797 switch (DECL_FUNCTION_CODE (fndecl))
7799 case BUILT_IN_BSWAP32:
7800 case BUILT_IN_BSWAP64:
7802 int s;
7804 for (s = 0; s < width; s += 8)
7806 int d = width - s - 8;
7807 unsigned HOST_WIDE_INT byte;
7809 if (s < HOST_BITS_PER_WIDE_INT)
7810 byte = (lo >> s) & 0xff;
7811 else
7812 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7814 if (d < HOST_BITS_PER_WIDE_INT)
7815 r_lo |= byte << d;
7816 else
7817 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7821 break;
7823 default:
7824 gcc_unreachable ();
7827 if (width < HOST_BITS_PER_WIDE_INT)
7828 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7829 else
7830 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7833 return NULL_TREE;
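/* Editorial note: illustrative sketch, not GCC source.  The byte loop above,
   specialized to a host-side 32-bit value: s indexes the source byte and d
   is the mirrored destination position.  */
#if 0
static unsigned int
bswap32_sketch (unsigned int x)
{
  unsigned int r = 0;
  int s;
  for (s = 0; s < 32; s += 8)
    {
      int d = 32 - s - 8;
      r |= ((x >> s) & 0xff) << d;
    }
  return r;   /* e.g. 0x11223344 becomes 0x44332211 */
}
#endif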
7836 /* Return true if EXPR is the real constant contained in VALUE. */
7838 static bool
7839 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
7841 STRIP_NOPS (expr);
7843 return ((TREE_CODE (expr) == REAL_CST
7844 && !TREE_OVERFLOW (expr)
7845 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
7846 || (TREE_CODE (expr) == COMPLEX_CST
7847 && real_dconstp (TREE_REALPART (expr), value)
7848 && real_zerop (TREE_IMAGPART (expr))));
7851 /* A subroutine of fold_builtin to fold the various logarithmic
7852 functions. Return NULL_TREE if no simplification can be made.
7853 FUNC is the corresponding MPFR logarithm function. */
7855 static tree
7856 fold_builtin_logarithm (tree fndecl, tree arg,
7857 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7859 if (validate_arg (arg, REAL_TYPE))
7861 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7862 tree res;
7863 const enum built_in_function fcode = builtin_mathfn_code (arg);
7865 /* Optimize log(e) = 1.0. We're never passed an exact 'e';
7866 instead we look for 'e' truncated to MODE, so only do
7867 this if flag_unsafe_math_optimizations is set. */
7868 if (flag_unsafe_math_optimizations && func == mpfr_log)
7870 const REAL_VALUE_TYPE e_truncated =
7871 real_value_truncate (TYPE_MODE (type), dconste);
7872 if (real_dconstp (arg, &e_truncated))
7873 return build_real (type, dconst1);
7876 /* Calculate the result when the argument is a constant. */
7877 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7878 return res;
7880 /* Special case: optimize logN(expN(x)) = x. */
7881 if (flag_unsafe_math_optimizations
7882 && ((func == mpfr_log
7883 && (fcode == BUILT_IN_EXP
7884 || fcode == BUILT_IN_EXPF
7885 || fcode == BUILT_IN_EXPL))
7886 || (func == mpfr_log2
7887 && (fcode == BUILT_IN_EXP2
7888 || fcode == BUILT_IN_EXP2F
7889 || fcode == BUILT_IN_EXP2L))
7890 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7891 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
7893 /* Optimize logN(func()) for various exponential functions. We
7894 want to determine the value "x" and the power "exponent" in
7895 order to transform logN(x**exponent) into exponent*logN(x). */
7896 if (flag_unsafe_math_optimizations)
7898 tree exponent = 0, x = 0;
7900 switch (fcode)
7902 CASE_FLT_FN (BUILT_IN_EXP):
7903 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
7904 x = build_real (type,
7905 real_value_truncate (TYPE_MODE (type), dconste));
7906 exponent = CALL_EXPR_ARG (arg, 0);
7907 break;
7908 CASE_FLT_FN (BUILT_IN_EXP2):
7909 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
7910 x = build_real (type, dconst2);
7911 exponent = CALL_EXPR_ARG (arg, 0);
7912 break;
7913 CASE_FLT_FN (BUILT_IN_EXP10):
7914 CASE_FLT_FN (BUILT_IN_POW10):
7915 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
7916 x = build_real (type, dconst10);
7917 exponent = CALL_EXPR_ARG (arg, 0);
7918 break;
7919 CASE_FLT_FN (BUILT_IN_SQRT):
7920 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
7921 x = CALL_EXPR_ARG (arg, 0);
7922 exponent = build_real (type, dconsthalf);
7923 break;
7924 CASE_FLT_FN (BUILT_IN_CBRT):
7925 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
7926 x = CALL_EXPR_ARG (arg, 0);
7927 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7928 dconstthird));
7929 break;
7930 CASE_FLT_FN (BUILT_IN_POW):
7931 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
7932 x = CALL_EXPR_ARG (arg, 0);
7933 exponent = CALL_EXPR_ARG (arg, 1);
7934 break;
7935 default:
7936 break;
7939 /* Now perform the optimization. */
7940 if (x && exponent)
7942 tree logfn = build_call_expr (fndecl, 1, x);
7943 return fold_build2 (MULT_EXPR, type, exponent, logfn);
7948 return NULL_TREE;
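/* Editorial note: illustrative sketch, not GCC source.  A few of the
   logarithm folds above as source rewrites; x and y are hypothetical
   doubles and every line requires -funsafe-math-optimizations.  */
#if 0
#include <math.h>

double
log_fold_sketch (double x, double y)
{
  double a = log (exp (x));      /* folds to x              */
  double b = log2 (exp2 (x));    /* folds to x              */
  double c = log (sqrt (x));     /* folds to 0.5 * log (x)  */
  double d = log (pow (x, y));   /* folds to y * log (x)    */
  double e = log10 (exp (x));    /* folds to x * log10 (e)  */
  return a + b + c + d + e;
}
#endif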
7951 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7952 NULL_TREE if no simplification can be made. */
7954 static tree
7955 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
7957 tree res, narg0, narg1;
7959 if (!validate_arg (arg0, REAL_TYPE)
7960 || !validate_arg (arg1, REAL_TYPE))
7961 return NULL_TREE;
7963 /* Calculate the result when the argument is a constant. */
7964 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7965 return res;
7967 /* If either argument to hypot has a negate or abs, strip that off.
7968 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7969 narg0 = fold_strip_sign_ops (arg0);
7970 narg1 = fold_strip_sign_ops (arg1);
7971 if (narg0 || narg1)
7973 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
7974 narg1 ? narg1 : arg1);
7977 /* If either argument is zero, hypot is fabs of the other. */
7978 if (real_zerop (arg0))
7979 return fold_build1 (ABS_EXPR, type, arg1);
7980 else if (real_zerop (arg1))
7981 return fold_build1 (ABS_EXPR, type, arg0);
7983 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7984 if (flag_unsafe_math_optimizations
7985 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7987 const REAL_VALUE_TYPE sqrt2_trunc
7988 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
7989 return fold_build2 (MULT_EXPR, type,
7990 fold_build1 (ABS_EXPR, type, arg0),
7991 build_real (type, sqrt2_trunc));
7994 return NULL_TREE;
7998 /* Fold a builtin function call to pow, powf, or powl. Return
7999 NULL_TREE if no simplification can be made. */
8000 static tree
8001 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8003 tree res;
8005 if (!validate_arg (arg0, REAL_TYPE)
8006 || !validate_arg (arg1, REAL_TYPE))
8007 return NULL_TREE;
8009 /* Calculate the result when the argument is a constant. */
8010 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8011 return res;
8013 /* Optimize pow(1.0,y) = 1.0. */
8014 if (real_onep (arg0))
8015 return omit_one_operand (type, build_real (type, dconst1), arg1);
8017 if (TREE_CODE (arg1) == REAL_CST
8018 && !TREE_OVERFLOW (arg1))
8020 REAL_VALUE_TYPE cint;
8021 REAL_VALUE_TYPE c;
8022 HOST_WIDE_INT n;
8024 c = TREE_REAL_CST (arg1);
8026 /* Optimize pow(x,0.0) = 1.0. */
8027 if (REAL_VALUES_EQUAL (c, dconst0))
8028 return omit_one_operand (type, build_real (type, dconst1),
8029 arg0);
8031 /* Optimize pow(x,1.0) = x. */
8032 if (REAL_VALUES_EQUAL (c, dconst1))
8033 return arg0;
8035 /* Optimize pow(x,-1.0) = 1.0/x. */
8036 if (REAL_VALUES_EQUAL (c, dconstm1))
8037 return fold_build2 (RDIV_EXPR, type,
8038 build_real (type, dconst1), arg0);
8040 /* Optimize pow(x,0.5) = sqrt(x). */
8041 if (flag_unsafe_math_optimizations
8042 && REAL_VALUES_EQUAL (c, dconsthalf))
8044 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8046 if (sqrtfn != NULL_TREE)
8047 return build_call_expr (sqrtfn, 1, arg0);
8050 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8051 if (flag_unsafe_math_optimizations)
8053 const REAL_VALUE_TYPE dconstroot
8054 = real_value_truncate (TYPE_MODE (type), dconstthird);
8056 if (REAL_VALUES_EQUAL (c, dconstroot))
8058 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8059 if (cbrtfn != NULL_TREE)
8060 return build_call_expr (cbrtfn, 1, arg0);
8064 /* Check for an integer exponent. */
8065 n = real_to_integer (&c);
8066 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8067 if (real_identical (&c, &cint))
8069 /* Attempt to evaluate pow at compile-time. */
8070 if (TREE_CODE (arg0) == REAL_CST
8071 && !TREE_OVERFLOW (arg0))
8073 REAL_VALUE_TYPE x;
8074 bool inexact;
8076 x = TREE_REAL_CST (arg0);
8077 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8078 if (flag_unsafe_math_optimizations || !inexact)
8079 return build_real (type, x);
8082 /* Strip sign ops from even integer powers. */
8083 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8085 tree narg0 = fold_strip_sign_ops (arg0);
8086 if (narg0)
8087 return build_call_expr (fndecl, 2, narg0, arg1);
8092 if (flag_unsafe_math_optimizations)
8094 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8096 /* Optimize pow(expN(x),y) = expN(x*y). */
8097 if (BUILTIN_EXPONENT_P (fcode))
8099 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8100 tree arg = CALL_EXPR_ARG (arg0, 0);
8101 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8102 return build_call_expr (expfn, 1, arg);
8105 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8106 if (BUILTIN_SQRT_P (fcode))
8108 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8109 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8110 build_real (type, dconsthalf));
8111 return build_call_expr (fndecl, 2, narg0, narg1);
8114 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8115 if (BUILTIN_CBRT_P (fcode))
8117 tree arg = CALL_EXPR_ARG (arg0, 0);
8118 if (tree_expr_nonnegative_p (arg))
8120 const REAL_VALUE_TYPE dconstroot
8121 = real_value_truncate (TYPE_MODE (type), dconstthird);
8122 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8123 build_real (type, dconstroot));
8124 return build_call_expr (fndecl, 2, arg, narg1);
8128 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8129 if (fcode == BUILT_IN_POW
8130 || fcode == BUILT_IN_POWF
8131 || fcode == BUILT_IN_POWL)
8133 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8134 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8135 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8136 return build_call_expr (fndecl, 2, arg00, narg1);
8140 return NULL_TREE;
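/* Editorial note: illustrative sketch, not GCC source.  A few of the pow
   folds above as source rewrites; x, y and z are hypothetical doubles and
   the lines marked "unsafe" require -funsafe-math-optimizations.  */
#if 0
#include <math.h>

double
pow_fold_sketch (double x, double y, double z)
{
  double a = pow (x, 0.0);          /* folds to 1.0                        */
  double b = pow (x, -1.0);         /* folds to 1.0 / x                    */
  double c = pow (x, 0.5);          /* folds to sqrt (x)          (unsafe) */
  double d = pow (exp (x), y);      /* folds to exp (x * y)       (unsafe) */
  double e = pow (sqrt (x), y);     /* folds to pow (x, y * 0.5)  (unsafe) */
  double f = pow (pow (x, y), z);   /* folds to pow (x, y * z)    (unsafe) */
  return a + b + c + d + e + f;
}
#endif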
8143 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8144 Return NULL_TREE if no simplification can be made. */
8145 static tree
8146 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8147 tree arg0, tree arg1, tree type)
8149 if (!validate_arg (arg0, REAL_TYPE)
8150 || !validate_arg (arg1, INTEGER_TYPE))
8151 return NULL_TREE;
8153 /* Optimize pow(1.0,y) = 1.0. */
8154 if (real_onep (arg0))
8155 return omit_one_operand (type, build_real (type, dconst1), arg1);
8157 if (host_integerp (arg1, 0))
8159 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8161 /* Evaluate powi at compile-time. */
8162 if (TREE_CODE (arg0) == REAL_CST
8163 && !TREE_OVERFLOW (arg0))
8165 REAL_VALUE_TYPE x;
8166 x = TREE_REAL_CST (arg0);
8167 real_powi (&x, TYPE_MODE (type), &x, c);
8168 return build_real (type, x);
8171 /* Optimize pow(x,0) = 1.0. */
8172 if (c == 0)
8173 return omit_one_operand (type, build_real (type, dconst1),
8174 arg0);
8176 /* Optimize pow(x,1) = x. */
8177 if (c == 1)
8178 return arg0;
8180 /* Optimize pow(x,-1) = 1.0/x. */
8181 if (c == -1)
8182 return fold_build2 (RDIV_EXPR, type,
8183 build_real (type, dconst1), arg0);
8186 return NULL_TREE;
8189 /* A subroutine of fold_builtin to fold the various exponent
8190 functions. Return NULL_TREE if no simplification can be made.
8191 FUNC is the corresponding MPFR exponent function. */
8193 static tree
8194 fold_builtin_exponent (tree fndecl, tree arg,
8195 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8197 if (validate_arg (arg, REAL_TYPE))
8199 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8200 tree res;
8202 /* Calculate the result when the argument is a constant. */
8203 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8204 return res;
8206 /* Optimize expN(logN(x)) = x. */
8207 if (flag_unsafe_math_optimizations)
8209 const enum built_in_function fcode = builtin_mathfn_code (arg);
8211 if ((func == mpfr_exp
8212 && (fcode == BUILT_IN_LOG
8213 || fcode == BUILT_IN_LOGF
8214 || fcode == BUILT_IN_LOGL))
8215 || (func == mpfr_exp2
8216 && (fcode == BUILT_IN_LOG2
8217 || fcode == BUILT_IN_LOG2F
8218 || fcode == BUILT_IN_LOG2L))
8219 || (func == mpfr_exp10
8220 && (fcode == BUILT_IN_LOG10
8221 || fcode == BUILT_IN_LOG10F
8222 || fcode == BUILT_IN_LOG10L)))
8223 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8227 return NULL_TREE;
8230 /* Return true if VAR is a VAR_DECL or a component thereof. */
8232 static bool
8233 var_decl_component_p (tree var)
8235 tree inner = var;
8236 while (handled_component_p (inner))
8237 inner = TREE_OPERAND (inner, 0);
8238 return SSA_VAR_P (inner);
8241 /* Fold function call to builtin memset. Return
8242 NULL_TREE if no simplification can be made. */
8244 static tree
8245 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8247 tree var, ret;
8248 unsigned HOST_WIDE_INT length, cval;
8250 if (! validate_arg (dest, POINTER_TYPE)
8251 || ! validate_arg (c, INTEGER_TYPE)
8252 || ! validate_arg (len, INTEGER_TYPE))
8253 return NULL_TREE;
8255 if (! host_integerp (len, 1))
8256 return NULL_TREE;
8258 /* If the LEN parameter is zero, return DEST. */
8259 if (integer_zerop (len))
8260 return omit_one_operand (type, dest, c);
8262 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8263 return NULL_TREE;
8265 var = dest;
8266 STRIP_NOPS (var);
8267 if (TREE_CODE (var) != ADDR_EXPR)
8268 return NULL_TREE;
8270 var = TREE_OPERAND (var, 0);
8271 if (TREE_THIS_VOLATILE (var))
8272 return NULL_TREE;
8274 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8275 && !POINTER_TYPE_P (TREE_TYPE (var)))
8276 return NULL_TREE;
8278 if (! var_decl_component_p (var))
8279 return NULL_TREE;
8281 length = tree_low_cst (len, 1);
8282 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8283 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8284 < (int) length)
8285 return NULL_TREE;
8287 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8288 return NULL_TREE;
8290 if (integer_zerop (c))
8291 cval = 0;
8292 else
8294 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8295 return NULL_TREE;
8297 cval = tree_low_cst (c, 1);
8298 cval &= 0xff;
8299 cval |= cval << 8;
8300 cval |= cval << 16;
8301 cval |= (cval << 31) << 1;
8304 ret = build_int_cst_type (TREE_TYPE (var), cval);
8305 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8306 if (ignore)
8307 return ret;
8309 return omit_one_operand (type, dest, ret);
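/* Editorial note: illustrative sketch, not GCC source.  When the destination
   is the address of a single, suitably aligned scalar whose size equals LEN,
   the fold above turns the memset into a plain store of a byte-replicated
   constant; the variable i and the byte 0xAB are hypothetical.  */
#if 0
#include <string.h>

void
memset_fold_sketch (void)
{
  int i;                           /* assume a 32-bit int */
  memset (&i, 0xAB, sizeof (i));   /* folds to a store of 0xABABABAB into i,
                                      built by the shifts above:
                                      cval |= cval << 8; cval |= cval << 16;
                                      cval |= (cval << 31) << 1;  */
}
#endif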
8312 /* Fold function call to builtin bzero. Return
8313 NULL_TREE if no simplification can be made. */
8315 static tree
8316 fold_builtin_bzero (tree dest, tree size, bool ignore)
8318 if (! validate_arg (dest, POINTER_TYPE)
8319 || ! validate_arg (size, INTEGER_TYPE))
8320 return NULL_TREE;
8322 if (!ignore)
8323 return NULL_TREE;
8325 /* Transform bzero(ptr x, int y) into
8326 memset(ptr x, int 0, size_t y). This is done this way
8327 so that if it isn't expanded inline, we fall back to
8328 calling bzero instead of memset. */
8330 return fold_builtin_memset (dest, integer_zero_node,
8331 fold_convert (sizetype, size),
8332 void_type_node, ignore);
8335 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8336 NULL_TREE if no simplification can be made.
8337 If ENDP is 0, return DEST (like memcpy).
8338 If ENDP is 1, return DEST+LEN (like mempcpy).
8339 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8340 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8341 (memmove). */
8343 static tree
8344 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8346 tree destvar, srcvar, expr;
8348 if (! validate_arg (dest, POINTER_TYPE)
8349 || ! validate_arg (src, POINTER_TYPE)
8350 || ! validate_arg (len, INTEGER_TYPE))
8351 return NULL_TREE;
8353 /* If the LEN parameter is zero, return DEST. */
8354 if (integer_zerop (len))
8355 return omit_one_operand (type, dest, src);
8357 /* If SRC and DEST are the same (and not volatile), return
8358 DEST{,+LEN,+LEN-1}. */
8359 if (operand_equal_p (src, dest, 0))
8360 expr = len;
8361 else
8363 tree srctype, desttype;
8364 if (endp == 3)
8366 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8367 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8369 /* Both DEST and SRC must be pointer types.
8370 ??? This is what old code did. Is the testing for pointer types
8371 really mandatory?
8373 If either SRC is readonly or length is 1, we can use memcpy. */
8374 if (dest_align && src_align
8375 && (readonly_data_expr (src)
8376 || (host_integerp (len, 1)
8377 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8378 tree_low_cst (len, 1)))))
8380 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8381 if (!fn)
8382 return NULL_TREE;
8383 return build_call_expr (fn, 3, dest, src, len);
8385 return NULL_TREE;
8388 if (!host_integerp (len, 0))
8389 return NULL_TREE;
8390 /* FIXME:
8391 This logic loses for arguments like (type *)malloc (sizeof (type)),
8392 since we strip the casts off malloc's VOID * return value.
8393 Perhaps we ought to inherit the type from the non-VOID argument here? */
8394 STRIP_NOPS (src);
8395 STRIP_NOPS (dest);
8396 srctype = TREE_TYPE (TREE_TYPE (src));
8397 desttype = TREE_TYPE (TREE_TYPE (dest));
8398 if (!srctype || !desttype
8399 || !TYPE_SIZE_UNIT (srctype)
8400 || !TYPE_SIZE_UNIT (desttype)
8401 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8402 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8403 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8404 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8405 return NULL_TREE;
8407 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8408 < (int) TYPE_ALIGN (desttype)
8409 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8410 < (int) TYPE_ALIGN (srctype)))
8411 return NULL_TREE;
8413 if (!ignore)
8414 dest = builtin_save_expr (dest);
8416 srcvar = build_fold_indirect_ref (src);
8417 if (TREE_THIS_VOLATILE (srcvar))
8418 return NULL_TREE;
8419 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8420 return NULL_TREE;
8421 /* With memcpy, it is possible to bypass aliasing rules, so without
8422 this check e.g. execute/20060930-2.c would be misoptimized, because
8423 it uses a conflicting alias set to hold the argument for the memcpy call.
8424 This check is probably unnecessary with -fno-strict-aliasing.
8425 Similarly for destvar. See also PR29286. */
8426 if (!var_decl_component_p (srcvar)
8427 /* Accept: memcpy (*char_var, "test", 1); that simplifies
8428 to char_var='t'; */
8429 || is_gimple_min_invariant (srcvar)
8430 || readonly_data_expr (src))
8431 return NULL_TREE;
8433 destvar = build_fold_indirect_ref (dest);
8434 if (TREE_THIS_VOLATILE (destvar))
8435 return NULL_TREE;
8436 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8437 return NULL_TREE;
8438 if (!var_decl_component_p (destvar))
8439 return NULL_TREE;
8441 if (srctype == desttype
8442 || (gimple_in_ssa_p (cfun)
8443 && tree_ssa_useless_type_conversion_1 (desttype, srctype)))
8444 expr = srcvar;
8445 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8446 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8447 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8448 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8449 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8450 else
8451 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8452 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8455 if (ignore)
8456 return expr;
8458 if (endp == 0 || endp == 3)
8459 return omit_one_operand (type, dest, expr);
8461 if (expr == len)
8462 expr = NULL_TREE;
8464 if (endp == 2)
8465 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8466 ssize_int (1));
8468 len = fold_convert (TREE_TYPE (dest), len);
8469 dest = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
8470 dest = fold_convert (type, dest);
8471 if (expr)
8472 dest = omit_one_operand (type, dest, expr);
8473 return dest;
8476 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8477 If LEN is not NULL, it represents the length of the string to be
8478 copied. Return NULL_TREE if no simplification can be made. */
8480 tree
8481 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8483 tree fn;
8485 if (!validate_arg (dest, POINTER_TYPE)
8486 || !validate_arg (src, POINTER_TYPE))
8487 return NULL_TREE;
8489 /* If SRC and DEST are the same (and not volatile), return DEST. */
8490 if (operand_equal_p (src, dest, 0))
8491 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8493 if (optimize_size)
8494 return NULL_TREE;
8496 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8497 if (!fn)
8498 return NULL_TREE;
8500 if (!len)
8502 len = c_strlen (src, 1);
8503 if (! len || TREE_SIDE_EFFECTS (len))
8504 return NULL_TREE;
8507 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8508 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8509 build_call_expr (fn, 3, dest, src, len));
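/* Editorial note: illustrative sketch, not GCC source.  With a source string
   of known constant length, and when not optimizing for size, the strcpy
   fold above becomes a fixed-size memcpy; d is a hypothetical buffer large
   enough to hold the copy.  */
#if 0
#include <string.h>

void
strcpy_fold_sketch (char *d)
{
  strcpy (d, "abc");   /* folds to memcpy (d, "abc", 4), i.e. strlen + 1 bytes */
}
#endif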
8512 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8513 If SLEN is not NULL, it represents the length of the source string.
8514 Return NULL_TREE if no simplification can be made. */
8516 tree
8517 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8519 tree fn;
8521 if (!validate_arg (dest, POINTER_TYPE)
8522 || !validate_arg (src, POINTER_TYPE)
8523 || !validate_arg (len, INTEGER_TYPE))
8524 return NULL_TREE;
8526 /* If the LEN parameter is zero, return DEST. */
8527 if (integer_zerop (len))
8528 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8530 /* We can't compare slen with len as constants below if len is not a
8531 constant. */
8532 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8533 return NULL_TREE;
8535 if (!slen)
8536 slen = c_strlen (src, 1);
8538 /* Now, we must be passed a constant src ptr parameter. */
8539 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8540 return NULL_TREE;
8542 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8544 /* We do not support simplification of this case, though we do
8545 support it when expanding trees into RTL. */
8546 /* FIXME: generate a call to __builtin_memset. */
8547 if (tree_int_cst_lt (slen, len))
8548 return NULL_TREE;
8550 /* OK, transform into builtin memcpy. */
8551 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8552 if (!fn)
8553 return NULL_TREE;
8554 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8555 build_call_expr (fn, 3, dest, src, len));
8558 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8559 Return NULL_TREE if no simplification can be made. */
8561 static tree
8562 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8564 const char *p1, *p2;
8566 if (!validate_arg (arg1, POINTER_TYPE)
8567 || !validate_arg (arg2, POINTER_TYPE)
8568 || !validate_arg (len, INTEGER_TYPE))
8569 return NULL_TREE;
8571 /* If the LEN parameter is zero, return zero. */
8572 if (integer_zerop (len))
8573 return omit_two_operands (integer_type_node, integer_zero_node,
8574 arg1, arg2);
8576 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8577 if (operand_equal_p (arg1, arg2, 0))
8578 return omit_one_operand (integer_type_node, integer_zero_node, len);
8580 p1 = c_getstr (arg1);
8581 p2 = c_getstr (arg2);
8583 /* If all arguments are constant, and the value of len is not greater
8584 than the lengths of arg1 and arg2, evaluate at compile-time. */
8585 if (host_integerp (len, 1) && p1 && p2
8586 && compare_tree_int (len, strlen (p1) + 1) <= 0
8587 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8589 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8591 if (r > 0)
8592 return integer_one_node;
8593 else if (r < 0)
8594 return integer_minus_one_node;
8595 else
8596 return integer_zero_node;
8599 /* If the len parameter is one, return an expression corresponding to
8600 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8601 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8603 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8604 tree cst_uchar_ptr_node
8605 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8607 tree ind1 = fold_convert (integer_type_node,
8608 build1 (INDIRECT_REF, cst_uchar_node,
8609 fold_convert (cst_uchar_ptr_node,
8610 arg1)));
8611 tree ind2 = fold_convert (integer_type_node,
8612 build1 (INDIRECT_REF, cst_uchar_node,
8613 fold_convert (cst_uchar_ptr_node,
8614 arg2)));
8615 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8618 return NULL_TREE;
8621 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8622 Return NULL_TREE if no simplification can be made. */
8624 static tree
8625 fold_builtin_strcmp (tree arg1, tree arg2)
8627 const char *p1, *p2;
8629 if (!validate_arg (arg1, POINTER_TYPE)
8630 || !validate_arg (arg2, POINTER_TYPE))
8631 return NULL_TREE;
8633 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8634 if (operand_equal_p (arg1, arg2, 0))
8635 return integer_zero_node;
8637 p1 = c_getstr (arg1);
8638 p2 = c_getstr (arg2);
8640 if (p1 && p2)
8642 const int i = strcmp (p1, p2);
8643 if (i < 0)
8644 return integer_minus_one_node;
8645 else if (i > 0)
8646 return integer_one_node;
8647 else
8648 return integer_zero_node;
8651 /* If the second arg is "", return *(const unsigned char*)arg1. */
8652 if (p2 && *p2 == '\0')
8654 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8655 tree cst_uchar_ptr_node
8656 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8658 return fold_convert (integer_type_node,
8659 build1 (INDIRECT_REF, cst_uchar_node,
8660 fold_convert (cst_uchar_ptr_node,
8661 arg1)));
8664 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8665 if (p1 && *p1 == '\0')
8667 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8668 tree cst_uchar_ptr_node
8669 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8671 tree temp = fold_convert (integer_type_node,
8672 build1 (INDIRECT_REF, cst_uchar_node,
8673 fold_convert (cst_uchar_ptr_node,
8674 arg2)));
8675 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8678 return NULL_TREE;
8681 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8682 Return NULL_TREE if no simplification can be made. */
8684 static tree
8685 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
8687 const char *p1, *p2;
8689 if (!validate_arg (arg1, POINTER_TYPE)
8690 || !validate_arg (arg2, POINTER_TYPE)
8691 || !validate_arg (len, INTEGER_TYPE))
8692 return NULL_TREE;
8694 /* If the LEN parameter is zero, return zero. */
8695 if (integer_zerop (len))
8696 return omit_two_operands (integer_type_node, integer_zero_node,
8697 arg1, arg2);
8699 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8700 if (operand_equal_p (arg1, arg2, 0))
8701 return omit_one_operand (integer_type_node, integer_zero_node, len);
8703 p1 = c_getstr (arg1);
8704 p2 = c_getstr (arg2);
8706 if (host_integerp (len, 1) && p1 && p2)
8708 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8709 if (i > 0)
8710 return integer_one_node;
8711 else if (i < 0)
8712 return integer_minus_one_node;
8713 else
8714 return integer_zero_node;
8717 /* If the second arg is "", and the length is greater than zero,
8718 return *(const unsigned char*)arg1. */
8719 if (p2 && *p2 == '\0'
8720 && TREE_CODE (len) == INTEGER_CST
8721 && tree_int_cst_sgn (len) == 1)
8723 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8724 tree cst_uchar_ptr_node
8725 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8727 return fold_convert (integer_type_node,
8728 build1 (INDIRECT_REF, cst_uchar_node,
8729 fold_convert (cst_uchar_ptr_node,
8730 arg1)));
8733 /* If the first arg is "", and the length is greater than zero,
8734 return -*(const unsigned char*)arg2. */
8735 if (p1 && *p1 == '\0'
8736 && TREE_CODE (len) == INTEGER_CST
8737 && tree_int_cst_sgn (len) == 1)
8739 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8740 tree cst_uchar_ptr_node
8741 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8743 tree temp = fold_convert (integer_type_node,
8744 build1 (INDIRECT_REF, cst_uchar_node,
8745 fold_convert (cst_uchar_ptr_node,
8746 arg2)));
8747 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8750 /* If the len parameter is one, return an expression corresponding to
8751 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8752 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8754 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8755 tree cst_uchar_ptr_node
8756 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8758 tree ind1 = fold_convert (integer_type_node,
8759 build1 (INDIRECT_REF, cst_uchar_node,
8760 fold_convert (cst_uchar_ptr_node,
8761 arg1)));
8762 tree ind2 = fold_convert (integer_type_node,
8763 build1 (INDIRECT_REF, cst_uchar_node,
8764 fold_convert (cst_uchar_ptr_node,
8765 arg2)));
8766 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8769 return NULL_TREE;
8772 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8773 ARG. Return NULL_TREE if no simplification can be made. */
8775 static tree
8776 fold_builtin_signbit (tree arg, tree type)
8778 tree temp;
8780 if (!validate_arg (arg, REAL_TYPE))
8781 return NULL_TREE;
8783 /* If ARG is a compile-time constant, determine the result. */
8784 if (TREE_CODE (arg) == REAL_CST
8785 && !TREE_OVERFLOW (arg))
8787 REAL_VALUE_TYPE c;
8789 c = TREE_REAL_CST (arg);
8790 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8791 return fold_convert (type, temp);
8794 /* If ARG is non-negative, the result is always zero. */
8795 if (tree_expr_nonnegative_p (arg))
8796 return omit_one_operand (type, integer_zero_node, arg);
8798 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8799 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8800 return fold_build2 (LT_EXPR, type, arg,
8801 build_real (TREE_TYPE (arg), dconst0));
8803 return NULL_TREE;
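/* Editorial note: illustrative sketch, not GCC source.  The signbit folds
   above; x is a hypothetical double.  The "arg < 0.0" rewrite is only used
   when the format does not honor signed zeros, so it does not apply to the
   usual IEEE modes.  */
#if 0
#include <math.h>

int
signbit_fold_sketch (double x)
{
  int a = signbit (-2.5);       /* constant-folds to 1                      */
  int b = signbit (fabs (x));   /* folds to 0: the argument is non-negative */
  return a + b;
}
#endif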
8806 /* Fold function call to builtin copysign, copysignf or copysignl with
8807 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8808 be made. */
8810 static tree
8811 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
8813 tree tem;
8815 if (!validate_arg (arg1, REAL_TYPE)
8816 || !validate_arg (arg2, REAL_TYPE))
8817 return NULL_TREE;
8819 /* copysign(X,X) is X. */
8820 if (operand_equal_p (arg1, arg2, 0))
8821 return fold_convert (type, arg1);
8823 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8824 if (TREE_CODE (arg1) == REAL_CST
8825 && TREE_CODE (arg2) == REAL_CST
8826 && !TREE_OVERFLOW (arg1)
8827 && !TREE_OVERFLOW (arg2))
8829 REAL_VALUE_TYPE c1, c2;
8831 c1 = TREE_REAL_CST (arg1);
8832 c2 = TREE_REAL_CST (arg2);
8833 /* c1.sign := c2.sign. */
8834 real_copysign (&c1, &c2);
8835 return build_real (type, c1);
8838 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8839 Remember to evaluate Y for side-effects. */
8840 if (tree_expr_nonnegative_p (arg2))
8841 return omit_one_operand (type,
8842 fold_build1 (ABS_EXPR, type, arg1),
8843 arg2);
8845 /* Strip sign changing operations for the first argument. */
8846 tem = fold_strip_sign_ops (arg1);
8847 if (tem)
8848 return build_call_expr (fndecl, 2, tem, arg2);
8850 return NULL_TREE;
8853 /* Fold a call to builtin isascii with argument ARG. */
8855 static tree
8856 fold_builtin_isascii (tree arg)
8858 if (!validate_arg (arg, INTEGER_TYPE))
8859 return NULL_TREE;
8860 else
8862 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8863 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
8864 build_int_cst (NULL_TREE,
8865 ~ (unsigned HOST_WIDE_INT) 0x7f));
8866 return fold_build2 (EQ_EXPR, integer_type_node,
8867 arg, integer_zero_node);
8871 /* Fold a call to builtin toascii with argument ARG. */
8873 static tree
8874 fold_builtin_toascii (tree arg)
8876 if (!validate_arg (arg, INTEGER_TYPE))
8877 return NULL_TREE;
8879 /* Transform toascii(c) -> (c & 0x7f). */
8880 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8881 build_int_cst (NULL_TREE, 0x7f));
8884 /* Fold a call to builtin isdigit with argument ARG. */
8886 static tree
8887 fold_builtin_isdigit (tree arg)
8889 if (!validate_arg (arg, INTEGER_TYPE))
8890 return NULL_TREE;
8891 else
8893 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8894 /* According to the C standard, isdigit is unaffected by locale.
8895 However, it definitely is affected by the target character set. */
8896 unsigned HOST_WIDE_INT target_digit0
8897 = lang_hooks.to_target_charset ('0');
8899 if (target_digit0 == 0)
8900 return NULL_TREE;
8902 arg = fold_convert (unsigned_type_node, arg);
8903 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
8904 build_int_cst (unsigned_type_node, target_digit0));
8905 return fold_build2 (LE_EXPR, integer_type_node, arg,
8906 build_int_cst (unsigned_type_node, 9));
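/* Editorial note: illustrative sketch, not GCC source.  The three
   character-class folds above, written against the builtins directly so no
   header is needed; c is a hypothetical int and '0' is the digit zero in
   the target character set.  */
#if 0
int
ctype_fold_sketch (int c)
{
  int a = __builtin_isascii (c);   /* folds to (c & ~0x7f) == 0        */
  int b = __builtin_toascii (c);   /* folds to c & 0x7f                */
  int d = __builtin_isdigit (c);   /* folds to (unsigned) c - '0' <= 9 */
  return a + b + d;
}
#endif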
8910 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8912 static tree
8913 fold_builtin_fabs (tree arg, tree type)
8915 if (!validate_arg (arg, REAL_TYPE))
8916 return NULL_TREE;
8918 arg = fold_convert (type, arg);
8919 if (TREE_CODE (arg) == REAL_CST)
8920 return fold_abs_const (arg, type);
8921 return fold_build1 (ABS_EXPR, type, arg);
8924 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8926 static tree
8927 fold_builtin_abs (tree arg, tree type)
8929 if (!validate_arg (arg, INTEGER_TYPE))
8930 return NULL_TREE;
8932 arg = fold_convert (type, arg);
8933 if (TREE_CODE (arg) == INTEGER_CST)
8934 return fold_abs_const (arg, type);
8935 return fold_build1 (ABS_EXPR, type, arg);
8938 /* Fold a call to builtin fmin or fmax. */
8940 static tree
8941 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
8943 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8945 /* Calculate the result when the argument is a constant. */
8946 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8948 if (res)
8949 return res;
8951 /* If either argument is NaN, return the other one. Avoid the
8952 transformation if we get (and honor) a signalling NaN. Using
8953 omit_one_operand() ensures we create a non-lvalue. */
8954 if (TREE_CODE (arg0) == REAL_CST
8955 && real_isnan (&TREE_REAL_CST (arg0))
8956 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8957 || ! TREE_REAL_CST (arg0).signalling))
8958 return omit_one_operand (type, arg1, arg0);
8959 if (TREE_CODE (arg1) == REAL_CST
8960 && real_isnan (&TREE_REAL_CST (arg1))
8961 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
8962 || ! TREE_REAL_CST (arg1).signalling))
8963 return omit_one_operand (type, arg0, arg1);
8965 /* Transform fmin/fmax(x,x) -> x. */
8966 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8967 return omit_one_operand (type, arg0, arg1);
8969 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8970 functions to return the numeric arg if the other one is NaN.
8971 These tree codes don't honor that, so only transform if
8972 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8973 handled, so we don't have to worry about it either. */
8974 if (flag_finite_math_only)
8975 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
8976 fold_convert (type, arg0),
8977 fold_convert (type, arg1));
8979 return NULL_TREE;
8982 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8984 static tree
8985 fold_builtin_carg (tree arg, tree type)
8987 if (validate_arg (arg, COMPLEX_TYPE))
8989 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8991 if (atan2_fn)
8993 tree new_arg = builtin_save_expr (arg);
8994 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
8995 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
8996 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9000 return NULL_TREE;
9003 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9004 then we can assume the base is two. If it's false, then we have to
9005 check the mode of the TYPE parameter in certain cases. */
9007 static tree
9008 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9010 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9012 STRIP_NOPS (arg0);
9013 STRIP_NOPS (arg1);
9015 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9016 if (real_zerop (arg0) || integer_zerop (arg1)
9017 || (TREE_CODE (arg0) == REAL_CST
9018 && (real_isnan (&TREE_REAL_CST (arg0))
9019 || real_isinf (&TREE_REAL_CST (arg0)))))
9020 return omit_one_operand (type, arg0, arg1);
9022 /* If both arguments are constant, then try to evaluate it. */
9023 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9024 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9025 && host_integerp (arg1, 0))
9027 /* Bound the maximum adjustment to twice the range of the
9028 mode's valid exponents. Use abs to ensure the range is
9029 positive as a sanity check. */
9030 const long max_exp_adj = 2 *
9031 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9032 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9034 /* Get the user-requested adjustment. */
9035 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9037 /* The requested adjustment must be inside this range. This
9038 is a preliminary cap to avoid things like overflow; we
9039 may still fail to compute the result for other reasons. */
9040 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9042 REAL_VALUE_TYPE initial_result;
9044 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9046 /* Ensure we didn't overflow. */
9047 if (! real_isinf (&initial_result))
9049 const REAL_VALUE_TYPE trunc_result
9050 = real_value_truncate (TYPE_MODE (type), initial_result);
9052 /* Only proceed if the target mode can hold the
9053 resulting value. */
9054 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9055 return build_real (type, trunc_result);
9061 return NULL_TREE;
9064 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9065 ARG is the argument for the call. */
9067 static tree
9068 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9070 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9071 REAL_VALUE_TYPE r;
9073 if (!validate_arg (arg, REAL_TYPE))
9075 error ("non-floating-point argument to function %qs",
9076 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9077 return error_mark_node;
9080 switch (builtin_index)
9082 case BUILT_IN_ISINF:
9083 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9084 return omit_one_operand (type, integer_zero_node, arg);
9086 if (TREE_CODE (arg) == REAL_CST)
9088 r = TREE_REAL_CST (arg);
9089 if (real_isinf (&r))
9090 return real_compare (GT_EXPR, &r, &dconst0)
9091 ? integer_one_node : integer_minus_one_node;
9092 else
9093 return integer_zero_node;
9096 return NULL_TREE;
9098 case BUILT_IN_FINITE:
9099 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9100 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9101 return omit_one_operand (type, integer_one_node, arg);
9103 if (TREE_CODE (arg) == REAL_CST)
9105 r = TREE_REAL_CST (arg);
9106 return real_isinf (&r) || real_isnan (&r)
9107 ? integer_zero_node : integer_one_node;
9110 return NULL_TREE;
9112 case BUILT_IN_ISNAN:
9113 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9114 return omit_one_operand (type, integer_zero_node, arg);
9116 if (TREE_CODE (arg) == REAL_CST)
9118 r = TREE_REAL_CST (arg);
9119 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9122 arg = builtin_save_expr (arg);
9123 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9125 default:
9126 gcc_unreachable ();
9130 /* Fold a call to an unordered comparison function such as
9131 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9132 being called and ARG0 and ARG1 are the arguments for the call.
9133 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9134 the opposite of the desired result. UNORDERED_CODE is used
9135 for modes that can hold NaNs and ORDERED_CODE is used for
9136 the rest. */
9138 static tree
9139 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9140 enum tree_code unordered_code,
9141 enum tree_code ordered_code)
9143 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9144 enum tree_code code;
9145 tree type0, type1;
9146 enum tree_code code0, code1;
9147 tree cmp_type = NULL_TREE;
9149 type0 = TREE_TYPE (arg0);
9150 type1 = TREE_TYPE (arg1);
9152 code0 = TREE_CODE (type0);
9153 code1 = TREE_CODE (type1);
9155 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9156 /* Choose the wider of two real types. */
9157 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9158 ? type0 : type1;
9159 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9160 cmp_type = type0;
9161 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9162 cmp_type = type1;
9163 else
9165 error ("non-floating-point argument to function %qs",
9166 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9167 return error_mark_node;
9170 arg0 = fold_convert (cmp_type, arg0);
9171 arg1 = fold_convert (cmp_type, arg1);
9173 if (unordered_code == UNORDERED_EXPR)
9175 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9176 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9177 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9180 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9181 : ordered_code;
9182 return fold_build1 (TRUTH_NOT_EXPR, type,
9183 fold_build2 (code, type, arg0, arg1));
9186 /* Fold a call to built-in function FNDECL with 0 arguments.
9187 IGNORE is true if the result of the function call is ignored. This
9188 function returns NULL_TREE if no simplification was possible. */
9190 static tree
9191 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9193 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9194 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9195 switch (fcode)
9197 CASE_FLT_FN (BUILT_IN_INF):
9198 case BUILT_IN_INFD32:
9199 case BUILT_IN_INFD64:
9200 case BUILT_IN_INFD128:
9201 return fold_builtin_inf (type, true);
9203 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9204 return fold_builtin_inf (type, false);
9206 case BUILT_IN_CLASSIFY_TYPE:
9207 return fold_builtin_classify_type (NULL_TREE);
9209 default:
9210 break;
9212 return NULL_TREE;
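/* Editorial example, not part of the original source: a zero-argument
   call such as

     double d = __builtin_inf ();

   is replaced here by a REAL_CST for +Inf via fold_builtin_inf; the
   boolean argument apparently selects whether to diagnose targets whose
   format has no infinities (true for the inf family, false for
   __builtin_huge_val).  */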
9215 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9216 IGNORE is true if the result of the function call is ignored. This
9217 function returns NULL_TREE if no simplification was possible. */
9219 static tree
9220 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9222 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9223 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9224 switch (fcode)
9227 case BUILT_IN_CONSTANT_P:
9229 tree val = fold_builtin_constant_p (arg0);
9231 /* Gimplification will pull the CALL_EXPR for the builtin out of
9232 an if condition. When not optimizing, we won't CSE it back.
9233 To avoid regressions such as link errors, return false now. */
9234 if (!val && !optimize)
9235 val = integer_zero_node;
9237 return val;
9240 case BUILT_IN_CLASSIFY_TYPE:
9241 return fold_builtin_classify_type (arg0);
9243 case BUILT_IN_STRLEN:
9244 return fold_builtin_strlen (arg0);
9246 CASE_FLT_FN (BUILT_IN_FABS):
9247 return fold_builtin_fabs (arg0, type);
9249 case BUILT_IN_ABS:
9250 case BUILT_IN_LABS:
9251 case BUILT_IN_LLABS:
9252 case BUILT_IN_IMAXABS:
9253 return fold_builtin_abs (arg0, type);
9255 CASE_FLT_FN (BUILT_IN_CONJ):
9256 if (validate_arg (arg0, COMPLEX_TYPE))
9257 return fold_build1 (CONJ_EXPR, type, arg0);
9258 break;
9260 CASE_FLT_FN (BUILT_IN_CREAL):
9261 if (validate_arg (arg0, COMPLEX_TYPE))
9262 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
9263 break;
9265 CASE_FLT_FN (BUILT_IN_CIMAG):
9266 if (validate_arg (arg0, COMPLEX_TYPE))
9267 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9268 break;
9270 CASE_FLT_FN (BUILT_IN_CCOS):
9271 CASE_FLT_FN (BUILT_IN_CCOSH):
9272 /* These functions are "even", i.e. f(x) == f(-x). */
9273 if (validate_arg (arg0, COMPLEX_TYPE))
9275 tree narg = fold_strip_sign_ops (arg0);
9276 if (narg)
9277 return build_call_expr (fndecl, 1, narg);
9279 break;
9281 CASE_FLT_FN (BUILT_IN_CABS):
9282 return fold_builtin_cabs (arg0, type, fndecl);
9284 CASE_FLT_FN (BUILT_IN_CARG):
9285 return fold_builtin_carg (arg0, type);
9287 CASE_FLT_FN (BUILT_IN_SQRT):
9288 return fold_builtin_sqrt (arg0, type);
9290 CASE_FLT_FN (BUILT_IN_CBRT):
9291 return fold_builtin_cbrt (arg0, type);
9293 CASE_FLT_FN (BUILT_IN_ASIN):
9294 if (validate_arg (arg0, REAL_TYPE))
9295 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9296 &dconstm1, &dconst1, true);
9297 break;
9299 CASE_FLT_FN (BUILT_IN_ACOS):
9300 if (validate_arg (arg0, REAL_TYPE))
9301 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9302 &dconstm1, &dconst1, true);
9303 break;
9305 CASE_FLT_FN (BUILT_IN_ATAN):
9306 if (validate_arg (arg0, REAL_TYPE))
9307 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9308 break;
9310 CASE_FLT_FN (BUILT_IN_ASINH):
9311 if (validate_arg (arg0, REAL_TYPE))
9312 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9313 break;
9315 CASE_FLT_FN (BUILT_IN_ACOSH):
9316 if (validate_arg (arg0, REAL_TYPE))
9317 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9318 &dconst1, NULL, true);
9319 break;
9321 CASE_FLT_FN (BUILT_IN_ATANH):
9322 if (validate_arg (arg0, REAL_TYPE))
9323 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9324 &dconstm1, &dconst1, false);
9325 break;
9327 CASE_FLT_FN (BUILT_IN_SIN):
9328 if (validate_arg (arg0, REAL_TYPE))
9329 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9330 break;
9332 CASE_FLT_FN (BUILT_IN_COS):
9333 return fold_builtin_cos (arg0, type, fndecl);
9334 break;
9336 CASE_FLT_FN (BUILT_IN_TAN):
9337 return fold_builtin_tan (arg0, type);
9339 CASE_FLT_FN (BUILT_IN_CEXP):
9340 return fold_builtin_cexp (arg0, type);
9342 CASE_FLT_FN (BUILT_IN_CEXPI):
9343 if (validate_arg (arg0, REAL_TYPE))
9344 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9345 break;
9347 CASE_FLT_FN (BUILT_IN_SINH):
9348 if (validate_arg (arg0, REAL_TYPE))
9349 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9350 break;
9352 CASE_FLT_FN (BUILT_IN_COSH):
9353 return fold_builtin_cosh (arg0, type, fndecl);
9355 CASE_FLT_FN (BUILT_IN_TANH):
9356 if (validate_arg (arg0, REAL_TYPE))
9357 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9358 break;
9360 CASE_FLT_FN (BUILT_IN_ERF):
9361 if (validate_arg (arg0, REAL_TYPE))
9362 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9363 break;
9365 CASE_FLT_FN (BUILT_IN_ERFC):
9366 if (validate_arg (arg0, REAL_TYPE))
9367 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9368 break;
9370 CASE_FLT_FN (BUILT_IN_TGAMMA):
9371 if (validate_arg (arg0, REAL_TYPE))
9372 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9373 break;
9375 CASE_FLT_FN (BUILT_IN_EXP):
9376 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9378 CASE_FLT_FN (BUILT_IN_EXP2):
9379 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9381 CASE_FLT_FN (BUILT_IN_EXP10):
9382 CASE_FLT_FN (BUILT_IN_POW10):
9383 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9385 CASE_FLT_FN (BUILT_IN_EXPM1):
9386 if (validate_arg (arg0, REAL_TYPE))
9387 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9388 break;
9390 CASE_FLT_FN (BUILT_IN_LOG):
9391 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9393 CASE_FLT_FN (BUILT_IN_LOG2):
9394 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9396 CASE_FLT_FN (BUILT_IN_LOG10):
9397 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9399 CASE_FLT_FN (BUILT_IN_LOG1P):
9400 if (validate_arg (arg0, REAL_TYPE))
9401 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9402 &dconstm1, NULL, false);
9403 break;
9405 CASE_FLT_FN (BUILT_IN_NAN):
9406 case BUILT_IN_NAND32:
9407 case BUILT_IN_NAND64:
9408 case BUILT_IN_NAND128:
9409 return fold_builtin_nan (arg0, type, true);
9411 CASE_FLT_FN (BUILT_IN_NANS):
9412 return fold_builtin_nan (arg0, type, false);
9414 CASE_FLT_FN (BUILT_IN_FLOOR):
9415 return fold_builtin_floor (fndecl, arg0);
9417 CASE_FLT_FN (BUILT_IN_CEIL):
9418 return fold_builtin_ceil (fndecl, arg0);
9420 CASE_FLT_FN (BUILT_IN_TRUNC):
9421 return fold_builtin_trunc (fndecl, arg0);
9423 CASE_FLT_FN (BUILT_IN_ROUND):
9424 return fold_builtin_round (fndecl, arg0);
9426 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9427 CASE_FLT_FN (BUILT_IN_RINT):
9428 return fold_trunc_transparent_mathfn (fndecl, arg0);
9430 CASE_FLT_FN (BUILT_IN_LCEIL):
9431 CASE_FLT_FN (BUILT_IN_LLCEIL):
9432 CASE_FLT_FN (BUILT_IN_LFLOOR):
9433 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9434 CASE_FLT_FN (BUILT_IN_LROUND):
9435 CASE_FLT_FN (BUILT_IN_LLROUND):
9436 return fold_builtin_int_roundingfn (fndecl, arg0);
9438 CASE_FLT_FN (BUILT_IN_LRINT):
9439 CASE_FLT_FN (BUILT_IN_LLRINT):
9440 return fold_fixed_mathfn (fndecl, arg0);
9442 case BUILT_IN_BSWAP32:
9443 case BUILT_IN_BSWAP64:
9444 return fold_builtin_bswap (fndecl, arg0);
9446 CASE_INT_FN (BUILT_IN_FFS):
9447 CASE_INT_FN (BUILT_IN_CLZ):
9448 CASE_INT_FN (BUILT_IN_CTZ):
9449 CASE_INT_FN (BUILT_IN_POPCOUNT):
9450 CASE_INT_FN (BUILT_IN_PARITY):
9451 return fold_builtin_bitop (fndecl, arg0);
9453 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9454 return fold_builtin_signbit (arg0, type);
9456 case BUILT_IN_ISASCII:
9457 return fold_builtin_isascii (arg0);
9459 case BUILT_IN_TOASCII:
9460 return fold_builtin_toascii (arg0);
9462 case BUILT_IN_ISDIGIT:
9463 return fold_builtin_isdigit (arg0);
9465 CASE_FLT_FN (BUILT_IN_FINITE):
9466 case BUILT_IN_FINITED32:
9467 case BUILT_IN_FINITED64:
9468 case BUILT_IN_FINITED128:
9469 return fold_builtin_classify (fndecl, arg0, BUILT_IN_FINITE);
9471 CASE_FLT_FN (BUILT_IN_ISINF):
9472 case BUILT_IN_ISINFD32:
9473 case BUILT_IN_ISINFD64:
9474 case BUILT_IN_ISINFD128:
9475 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
9477 CASE_FLT_FN (BUILT_IN_ISNAN):
9478 case BUILT_IN_ISNAND32:
9479 case BUILT_IN_ISNAND64:
9480 case BUILT_IN_ISNAND128:
9481 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
9483 case BUILT_IN_PRINTF:
9484 case BUILT_IN_PRINTF_UNLOCKED:
9485 case BUILT_IN_VPRINTF:
9486 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
9488 default:
9489 break;
9492 return NULL_TREE;
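/* Editorial example, not part of the original source: with a constant
   argument most of the one-argument math builtins above fold at compile
   time, e.g.

     double a = __builtin_fabs (-3.5);   becomes the constant 3.5
     double b = __builtin_sqrt (4.0);    is expected to become 2.0 via MPFR

   while non-constant arguments fall through, return NULL_TREE and are
   left for the expanders.  */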
9496 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9497 IGNORE is true if the result of the function call is ignored. This
9498 function returns NULL_TREE if no simplification was possible. */
9500 static tree
9501 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
9503 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9504 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9506 switch (fcode)
9509 CASE_FLT_FN (BUILT_IN_ATAN2):
9510 if (validate_arg (arg0, REAL_TYPE)
9511 && validate_arg(arg1, REAL_TYPE))
9512 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9513 break;
9515 CASE_FLT_FN (BUILT_IN_FDIM):
9516 if (validate_arg (arg0, REAL_TYPE)
9517 && validate_arg(arg1, REAL_TYPE))
9518 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9519 break;
9521 CASE_FLT_FN (BUILT_IN_HYPOT):
9522 return fold_builtin_hypot (fndecl, arg0, arg1, type);
9524 CASE_FLT_FN (BUILT_IN_LDEXP):
9525 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
9526 CASE_FLT_FN (BUILT_IN_SCALBN):
9527 CASE_FLT_FN (BUILT_IN_SCALBLN):
9528 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
9530 case BUILT_IN_BZERO:
9531 return fold_builtin_bzero (arg0, arg1, ignore);
9533 case BUILT_IN_FPUTS:
9534 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
9536 case BUILT_IN_FPUTS_UNLOCKED:
9537 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
9539 case BUILT_IN_STRSTR:
9540 return fold_builtin_strstr (arg0, arg1, type);
9542 case BUILT_IN_STRCAT:
9543 return fold_builtin_strcat (arg0, arg1);
9545 case BUILT_IN_STRSPN:
9546 return fold_builtin_strspn (arg0, arg1);
9548 case BUILT_IN_STRCSPN:
9549 return fold_builtin_strcspn (arg0, arg1);
9551 case BUILT_IN_STRCHR:
9552 case BUILT_IN_INDEX:
9553 return fold_builtin_strchr (arg0, arg1, type);
9555 case BUILT_IN_STRRCHR:
9556 case BUILT_IN_RINDEX:
9557 return fold_builtin_strrchr (arg0, arg1, type);
9559 case BUILT_IN_STRCPY:
9560 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
9562 case BUILT_IN_STRCMP:
9563 return fold_builtin_strcmp (arg0, arg1);
9565 case BUILT_IN_STRPBRK:
9566 return fold_builtin_strpbrk (arg0, arg1, type);
9568 case BUILT_IN_EXPECT:
9569 return fold_builtin_expect (arg0);
9571 CASE_FLT_FN (BUILT_IN_POW):
9572 return fold_builtin_pow (fndecl, arg0, arg1, type);
9574 CASE_FLT_FN (BUILT_IN_POWI):
9575 return fold_builtin_powi (fndecl, arg0, arg1, type);
9577 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9578 return fold_builtin_copysign (fndecl, arg0, arg1, type);
9580 CASE_FLT_FN (BUILT_IN_FMIN):
9581 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
9583 CASE_FLT_FN (BUILT_IN_FMAX):
9584 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
9586 case BUILT_IN_ISGREATER:
9587 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
9588 case BUILT_IN_ISGREATEREQUAL:
9589 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
9590 case BUILT_IN_ISLESS:
9591 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
9592 case BUILT_IN_ISLESSEQUAL:
9593 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
9594 case BUILT_IN_ISLESSGREATER:
9595 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9596 case BUILT_IN_ISUNORDERED:
9597 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
9598 NOP_EXPR);
9600 /* We do the folding for va_start in the expander. */
9601 case BUILT_IN_VA_START:
9602 break;
9604 case BUILT_IN_SPRINTF:
9605 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
9607 case BUILT_IN_OBJECT_SIZE:
9608 return fold_builtin_object_size (arg0, arg1);
9610 case BUILT_IN_PRINTF:
9611 case BUILT_IN_PRINTF_UNLOCKED:
9612 case BUILT_IN_VPRINTF:
9613 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
9615 case BUILT_IN_PRINTF_CHK:
9616 case BUILT_IN_VPRINTF_CHK:
9617 if (!validate_arg (arg0, INTEGER_TYPE)
9618 || TREE_SIDE_EFFECTS (arg0))
9619 return NULL_TREE;
9620 else
9621 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
9622 break;
9624 case BUILT_IN_FPRINTF:
9625 case BUILT_IN_FPRINTF_UNLOCKED:
9626 case BUILT_IN_VFPRINTF:
9627 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
9628 ignore, fcode);
9630 default:
9631 break;
9633 return NULL_TREE;
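/* Editorial example, not part of the original source: typical
   two-argument folds dispatched above include

     int c = __builtin_strcmp ("abc", "abc");   folds to the constant 0
     double p = __builtin_pow (x, 1.0);         is expected to simplify to x

   and anything unrecognized returns NULL_TREE so the call is expanded
   normally.  */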
9636 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9637 and ARG2. IGNORE is true if the result of the function call is ignored.
9638 This function returns NULL_TREE if no simplification was possible. */
9640 static tree
9641 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
9643 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9644 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9645 switch (fcode)
9648 CASE_FLT_FN (BUILT_IN_SINCOS):
9649 return fold_builtin_sincos (arg0, arg1, arg2);
9651 CASE_FLT_FN (BUILT_IN_FMA):
9652 if (validate_arg (arg0, REAL_TYPE)
9653 && validate_arg(arg1, REAL_TYPE)
9654 && validate_arg(arg2, REAL_TYPE))
9655 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9656 break;
9658 case BUILT_IN_MEMSET:
9659 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
9661 case BUILT_IN_BCOPY:
9662 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
9664 case BUILT_IN_MEMCPY:
9665 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
9667 case BUILT_IN_MEMPCPY:
9668 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
9670 case BUILT_IN_MEMMOVE:
9671 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
9673 case BUILT_IN_STRNCAT:
9674 return fold_builtin_strncat (arg0, arg1, arg2);
9676 case BUILT_IN_STRNCPY:
9677 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
9679 case BUILT_IN_STRNCMP:
9680 return fold_builtin_strncmp (arg0, arg1, arg2);
9682 case BUILT_IN_BCMP:
9683 case BUILT_IN_MEMCMP:
9684 return fold_builtin_memcmp (arg0, arg1, arg2);
9686 case BUILT_IN_SPRINTF:
9687 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
9689 case BUILT_IN_STRCPY_CHK:
9690 case BUILT_IN_STPCPY_CHK:
9691 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
9692 ignore, fcode);
9694 case BUILT_IN_STRCAT_CHK:
9695 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
9697 case BUILT_IN_PRINTF_CHK:
9698 case BUILT_IN_VPRINTF_CHK:
9699 if (!validate_arg (arg0, INTEGER_TYPE)
9700 || TREE_SIDE_EFFECTS (arg0))
9701 return NULL_TREE;
9702 else
9703 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
9704 break;
9706 case BUILT_IN_FPRINTF:
9707 case BUILT_IN_FPRINTF_UNLOCKED:
9708 case BUILT_IN_VFPRINTF:
9709 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
9711 case BUILT_IN_FPRINTF_CHK:
9712 case BUILT_IN_VFPRINTF_CHK:
9713 if (!validate_arg (arg1, INTEGER_TYPE)
9714 || TREE_SIDE_EFFECTS (arg1))
9715 return NULL_TREE;
9716 else
9717 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
9718 ignore, fcode);
9720 default:
9721 break;
9723 return NULL_TREE;
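/* Editorial note, not part of the original source: BCOPY above is
   handed to the generic memory folder with its first two arguments
   swapped and endp == 3, i.e.

     bcopy (src, dst, n);

   is treated as the void-returning equivalent of

     memmove (dst, src, n);

   so the usual memcpy/memmove simplifications apply to it as well.  */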
9726 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
9727 ARG2, and ARG3. IGNORE is true if the result of the function call is
9728 ignored. This function returns NULL_TREE if no simplification was
9729 possible. */
9731 static tree
9732 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
9733 bool ignore)
9735 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9737 switch (fcode)
9739 case BUILT_IN_MEMCPY_CHK:
9740 case BUILT_IN_MEMPCPY_CHK:
9741 case BUILT_IN_MEMMOVE_CHK:
9742 case BUILT_IN_MEMSET_CHK:
9743 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
9744 NULL_TREE, ignore,
9745 DECL_FUNCTION_CODE (fndecl));
9747 case BUILT_IN_STRNCPY_CHK:
9748 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
9750 case BUILT_IN_STRNCAT_CHK:
9751 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
9753 case BUILT_IN_FPRINTF_CHK:
9754 case BUILT_IN_VFPRINTF_CHK:
9755 if (!validate_arg (arg1, INTEGER_TYPE)
9756 || TREE_SIDE_EFFECTS (arg1))
9757 return NULL_TREE;
9758 else
9759 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
9760 ignore, fcode);
9761 break;
9763 default:
9764 break;
9766 return NULL_TREE;
9769 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9770 arguments, where NARGS <= 4. IGNORE is true if the result of the
9771 function call is ignored. This function returns NULL_TREE if no
9772 simplification was possible. Note that this only folds builtins with
9773 fixed argument patterns. Foldings that do varargs-to-varargs
9774 transformations, or that match calls with more than 4 arguments,
9775 need to be handled with fold_builtin_varargs instead. */
9777 #define MAX_ARGS_TO_FOLD_BUILTIN 4
9779 static tree
9780 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
9782 tree ret = NULL_TREE;
9783 switch (nargs)
9785 case 0:
9786 ret = fold_builtin_0 (fndecl, ignore);
9787 break;
9788 case 1:
9789 ret = fold_builtin_1 (fndecl, args[0], ignore);
9790 break;
9791 case 2:
9792 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
9793 break;
9794 case 3:
9795 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
9796 break;
9797 case 4:
9798 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
9799 ignore);
9800 break;
9801 default:
9802 break;
9804 if (ret)
9806 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
9807 TREE_NO_WARNING (ret) = 1;
9808 return ret;
9810 return NULL_TREE;
9813 /* Builtins with folding operations that operate on "..." arguments
9814 need special handling; we need to store the arguments in a convenient
9815 data structure before attempting any folding. Fortunately there are
9816 only a few builtins that fall into this category. FNDECL is the
9817 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
9818 result of the function call is ignored. */
9820 static tree
9821 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
9823 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9824 tree ret = NULL_TREE;
9826 switch (fcode)
9828 case BUILT_IN_SPRINTF_CHK:
9829 case BUILT_IN_VSPRINTF_CHK:
9830 ret = fold_builtin_sprintf_chk (exp, fcode);
9831 break;
9833 case BUILT_IN_SNPRINTF_CHK:
9834 case BUILT_IN_VSNPRINTF_CHK:
9835 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
9837 default:
9838 break;
9840 if (ret)
9842 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9843 TREE_NO_WARNING (ret) = 1;
9844 return ret;
9846 return NULL_TREE;
9849 /* A wrapper function for builtin folding that prevents warnings for
9850 "statement without effect" and the like, caused by removing the
9851 call node earlier than the warning is generated. */
9853 tree
9854 fold_call_expr (tree exp, bool ignore)
9856 tree ret = NULL_TREE;
9857 tree fndecl = get_callee_fndecl (exp);
9858 if (fndecl
9859 && TREE_CODE (fndecl) == FUNCTION_DECL
9860 && DECL_BUILT_IN (fndecl))
9862 /* FIXME: Don't use a list in this interface. */
9863 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9864 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
9865 else
9867 int nargs = call_expr_nargs (exp);
9868 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
9870 tree *args = CALL_EXPR_ARGP (exp);
9871 ret = fold_builtin_n (fndecl, args, nargs, ignore);
9873 if (!ret)
9874 ret = fold_builtin_varargs (fndecl, exp, ignore);
9875 if (ret)
9877 /* Propagate location information from original call to
9878 expansion of builtin. Otherwise things like
9879 maybe_emit_chk_warning, that operate on the expansion
9880 of a builtin, will use the wrong location information. */
9881 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
9883 tree realret = ret;
9884 if (TREE_CODE (ret) == NOP_EXPR)
9885 realret = TREE_OPERAND (ret, 0);
9886 if (CAN_HAVE_LOCATION_P (realret)
9887 && !EXPR_HAS_LOCATION (realret))
9888 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
9890 return ret;
9894 return NULL_TREE;
9897 /* Conveniently construct a function call expression. FNDECL names the
9898 function to be called and ARGLIST is a TREE_LIST of arguments. */
9900 tree
9901 build_function_call_expr (tree fndecl, tree arglist)
9903 tree fntype = TREE_TYPE (fndecl);
9904 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
9905 return fold_builtin_call_list (TREE_TYPE (fntype), fn, arglist);
9908 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
9909 ARGLIST is a TREE_LIST of arguments. */
9911 tree
9912 fold_builtin_call_list (tree type, tree fn, tree arglist)
9914 tree ret = NULL_TREE;
9915 if (TREE_CODE (fn) == ADDR_EXPR)
9917 tree fndecl = TREE_OPERAND (fn, 0);
9918 if (TREE_CODE (fndecl) == FUNCTION_DECL
9919 && DECL_BUILT_IN (fndecl))
9921 /* FIXME: Don't use a list in this interface. */
9922 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9924 ret = targetm.fold_builtin (fndecl, arglist, false);
9925 if (ret)
9926 return ret;
9928 else
9930 tree tail = arglist;
9931 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
9932 int nargs;
9933 tree exp;
9935 for (nargs = 0; nargs < MAX_ARGS_TO_FOLD_BUILTIN; nargs++)
9937 if (!tail)
9938 break;
9939 args[nargs] = TREE_VALUE (tail);
9940 tail = TREE_CHAIN (tail);
9942 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
9944 ret = fold_builtin_n (fndecl, args, nargs, false);
9945 if (ret)
9946 return ret;
9948 exp = build_call_list (type, fn, arglist);
9949 ret = fold_builtin_varargs (fndecl, exp, false);
9950 return ret ? ret : exp;
9954 return build_call_list (type, fn, arglist);
9957 /* Conveniently construct a function call expression. FNDECL names the
9958 function to be called, N is the number of arguments, and the "..."
9959 parameters are the argument expressions. */
9961 tree
9962 build_call_expr (tree fndecl, int n, ...)
9964 va_list ap;
9965 tree ret;
9966 tree fntype = TREE_TYPE (fndecl);
9967 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
9969 va_start (ap, n);
9970 ret = fold_builtin_call_valist (TREE_TYPE (fntype), fn, n, ap);
9971 va_end (ap);
9972 return ret;
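/* Editorial usage note, not part of the original source: this helper is
   what the folders in this file use to emit replacement calls, e.g.

     return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));

   builds a two-argument CALL_EXPR to FN and immediately runs the builtin
   folders on it (via fold_builtin_call_valist), so chained
   simplifications happen in a single pass.  */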
9975 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
9976 N arguments are passed in the va_list AP. */
9978 tree
9979 fold_builtin_call_valist (tree type,
9980 tree fn,
9981 int n,
9982 va_list ap)
9984 tree ret = NULL_TREE;
9985 int i;
9986 tree exp;
9988 if (TREE_CODE (fn) == ADDR_EXPR)
9990 tree fndecl = TREE_OPERAND (fn, 0);
9991 if (TREE_CODE (fndecl) == FUNCTION_DECL
9992 && DECL_BUILT_IN (fndecl))
9994 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9996 tree arglist = NULL_TREE;
9997 va_list ap0;
9998 va_copy (ap0, ap);
9999 for (i = 0; i < n; i++)
10001 tree arg = va_arg (ap0, tree);
10002 arglist = tree_cons (NULL_TREE, arg, arglist);
10004 va_end (ap0);
10005 arglist = nreverse (arglist);
10006 ret = targetm.fold_builtin (fndecl, arglist, false);
10007 if (ret)
10008 return ret;
10010 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10012 /* First try the transformations that don't require consing up
10013 an exp. */
10014 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
10015 va_list ap0;
10016 va_copy (ap0, ap);
10017 for (i = 0; i < n; i++)
10018 args[i] = va_arg (ap0, tree);
10019 va_end (ap0);
10020 ret = fold_builtin_n (fndecl, args, n, false);
10021 if (ret)
10022 return ret;
10025 /* If we got this far, we need to build an exp. */
10026 exp = build_call_valist (type, fn, n, ap);
10027 ret = fold_builtin_varargs (fndecl, exp, false);
10028 return ret ? ret : exp;
10032 return build_call_valist (type, fn, n, ap);
10035 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10036 along with N new arguments specified as the "..." parameters. SKIP
10037 is the number of arguments in EXP to be omitted. This function is used
10038 to do varargs-to-varargs transformations. */
10040 static tree
10041 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10043 int oldnargs = call_expr_nargs (exp);
10044 int nargs = oldnargs - skip + n;
10045 tree fntype = TREE_TYPE (fndecl);
10046 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10047 tree *buffer;
10049 if (n > 0)
10051 int i, j;
10052 va_list ap;
10054 buffer = alloca (nargs * sizeof (tree));
10055 va_start (ap, n);
10056 for (i = 0; i < n; i++)
10057 buffer[i] = va_arg (ap, tree);
10058 va_end (ap);
10059 for (j = skip; j < oldnargs; j++, i++)
10060 buffer[i] = CALL_EXPR_ARG (exp, j);
10062 else
10063 buffer = CALL_EXPR_ARGP (exp) + skip;
10065 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10068 /* Validate a single argument ARG against a tree code CODE representing
10069 a type. */
10071 static bool
10072 validate_arg (tree arg, enum tree_code code)
10074 if (!arg)
10075 return false;
10076 else if (code == POINTER_TYPE)
10077 return POINTER_TYPE_P (TREE_TYPE (arg));
10078 return code == TREE_CODE (TREE_TYPE (arg));
10081 /* This function validates the types of a function call argument list
10082 against a specified list of tree_codes. If the last specifier is a 0,
10083 that represents an ellipsis; otherwise the last specifier must be a
10084 VOID_TYPE. */
10086 bool
10087 validate_arglist (tree callexpr, ...)
10089 enum tree_code code;
10090 bool res = 0;
10091 va_list ap;
10092 call_expr_arg_iterator iter;
10093 tree arg;
10095 va_start (ap, callexpr);
10096 init_call_expr_arg_iterator (callexpr, &iter);
10100 code = va_arg (ap, enum tree_code);
10101 switch (code)
10103 case 0:
10104 /* This signifies an ellipsis; any further arguments are all ok. */
10105 res = true;
10106 goto end;
10107 case VOID_TYPE:
10108 /* This signifies an endlink; if no arguments remain, return
10109 true, otherwise return false. */
10110 res = !more_call_expr_args_p (&iter);
10111 goto end;
10112 default:
10113 /* If no parameters remain or the parameter's code does not
10114 match the specified code, return false. Otherwise continue
10115 checking any remaining arguments. */
10116 arg = next_call_expr_arg (&iter);
10117 if (!validate_arg (arg, code))
10118 goto end;
10119 break;
10122 while (1);
10124 /* We need gotos here since we can only have one VA_CLOSE in a
10125 function. */
10126 end: ;
10127 va_end (ap);
10129 return res;
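/* Editorial usage note, not part of the original source: callers list
   the expected argument type codes terminated by VOID_TYPE, or by 0 to
   accept arbitrary trailing arguments, e.g.

     if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
       return NULL_RTX;

   accepts exactly one pointer followed by one integer.  */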
10132 /* Default target-specific builtin expander that does nothing. */
10134 rtx
10135 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10136 rtx target ATTRIBUTE_UNUSED,
10137 rtx subtarget ATTRIBUTE_UNUSED,
10138 enum machine_mode mode ATTRIBUTE_UNUSED,
10139 int ignore ATTRIBUTE_UNUSED)
10141 return NULL_RTX;
10144 /* Returns true if EXP represents data that would potentially reside
10145 in a readonly section. */
10147 static bool
10148 readonly_data_expr (tree exp)
10150 STRIP_NOPS (exp);
10152 if (TREE_CODE (exp) != ADDR_EXPR)
10153 return false;
10155 exp = get_base_address (TREE_OPERAND (exp, 0));
10156 if (!exp)
10157 return false;
10159 /* Make sure we call decl_readonly_section only for trees it
10160 can handle (since it returns true for everything it doesn't
10161 understand). */
10162 if (TREE_CODE (exp) == STRING_CST
10163 || TREE_CODE (exp) == CONSTRUCTOR
10164 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10165 return decl_readonly_section (exp, 0);
10166 else
10167 return false;
10170 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10171 to the call, and TYPE is its return type.
10173 Return NULL_TREE if no simplification was possible, otherwise return the
10174 simplified form of the call as a tree.
10176 The simplified form may be a constant or other expression which
10177 computes the same value, but in a more efficient manner (including
10178 calls to other builtin functions).
10180 The call may contain arguments which need to be evaluated, but
10181 which are not useful to determine the result of the call. In
10182 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10183 COMPOUND_EXPR will be an argument which must be evaluated.
10184 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10185 COMPOUND_EXPR in the chain will contain the tree for the simplified
10186 form of the builtin function call. */
10188 static tree
10189 fold_builtin_strstr (tree s1, tree s2, tree type)
10191 if (!validate_arg (s1, POINTER_TYPE)
10192 || !validate_arg (s2, POINTER_TYPE))
10193 return NULL_TREE;
10194 else
10196 tree fn;
10197 const char *p1, *p2;
10199 p2 = c_getstr (s2);
10200 if (p2 == NULL)
10201 return NULL_TREE;
10203 p1 = c_getstr (s1);
10204 if (p1 != NULL)
10206 const char *r = strstr (p1, p2);
10207 tree tem;
10209 if (r == NULL)
10210 return build_int_cst (TREE_TYPE (s1), 0);
10212 /* Return an offset into the constant string argument. */
10213 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10214 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10215 return fold_convert (type, tem);
10218 /* The argument is const char *, and the result is char *, so we need
10219 a type conversion here to avoid a warning. */
10220 if (p2[0] == '\0')
10221 return fold_convert (type, s1);
10223 if (p2[1] != '\0')
10224 return NULL_TREE;
10226 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10227 if (!fn)
10228 return NULL_TREE;
10230 /* New argument list transforming strstr(s1, s2) to
10231 strchr(s1, s2[0]). */
10232 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
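/* Editorial example, not part of the original source: with a known
   one-character needle,

     char *p = __builtin_strstr (s, "/");

   is rewritten above into the equivalent of

     char *p = __builtin_strchr (s, '/');

   (assuming the strchr decl is available), and a constant haystack is
   searched at compile time instead.  */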
10236 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10237 the call, and TYPE is its return type.
10239 Return NULL_TREE if no simplification was possible, otherwise return the
10240 simplified form of the call as a tree.
10242 The simplified form may be a constant or other expression which
10243 computes the same value, but in a more efficient manner (including
10244 calls to other builtin functions).
10246 The call may contain arguments which need to be evaluated, but
10247 which are not useful to determine the result of the call. In
10248 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10249 COMPOUND_EXPR will be an argument which must be evaluated.
10250 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10251 COMPOUND_EXPR in the chain will contain the tree for the simplified
10252 form of the builtin function call. */
10254 static tree
10255 fold_builtin_strchr (tree s1, tree s2, tree type)
10257 if (!validate_arg (s1, POINTER_TYPE)
10258 || !validate_arg (s2, INTEGER_TYPE))
10259 return NULL_TREE;
10260 else
10262 const char *p1;
10264 if (TREE_CODE (s2) != INTEGER_CST)
10265 return NULL_TREE;
10267 p1 = c_getstr (s1);
10268 if (p1 != NULL)
10270 char c;
10271 const char *r;
10272 tree tem;
10274 if (target_char_cast (s2, &c))
10275 return NULL_TREE;
10277 r = strchr (p1, c);
10279 if (r == NULL)
10280 return build_int_cst (TREE_TYPE (s1), 0);
10282 /* Return an offset into the constant string argument. */
10283 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10284 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10285 return fold_convert (type, tem);
10287 return NULL_TREE;
10291 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10292 the call, and TYPE is its return type.
10294 Return NULL_TREE if no simplification was possible, otherwise return the
10295 simplified form of the call as a tree.
10297 The simplified form may be a constant or other expression which
10298 computes the same value, but in a more efficient manner (including
10299 calls to other builtin functions).
10301 The call may contain arguments which need to be evaluated, but
10302 which are not useful to determine the result of the call. In
10303 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10304 COMPOUND_EXPR will be an argument which must be evaluated.
10305 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10306 COMPOUND_EXPR in the chain will contain the tree for the simplified
10307 form of the builtin function call. */
10309 static tree
10310 fold_builtin_strrchr (tree s1, tree s2, tree type)
10312 if (!validate_arg (s1, POINTER_TYPE)
10313 || !validate_arg (s2, INTEGER_TYPE))
10314 return NULL_TREE;
10315 else
10317 tree fn;
10318 const char *p1;
10320 if (TREE_CODE (s2) != INTEGER_CST)
10321 return NULL_TREE;
10323 p1 = c_getstr (s1);
10324 if (p1 != NULL)
10326 char c;
10327 const char *r;
10328 tree tem;
10330 if (target_char_cast (s2, &c))
10331 return NULL_TREE;
10333 r = strrchr (p1, c);
10335 if (r == NULL)
10336 return build_int_cst (TREE_TYPE (s1), 0);
10338 /* Return an offset into the constant string argument. */
10339 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10340 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10341 return fold_convert (type, tem);
10344 if (! integer_zerop (s2))
10345 return NULL_TREE;
10347 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10348 if (!fn)
10349 return NULL_TREE;
10351 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10352 return build_call_expr (fn, 2, s1, s2);
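/* Editorial example, not part of the original source: searching for the
   terminating NUL has only one possible hit, so

     char *p = __builtin_strrchr (s, 0);

   is rewritten above into the cheaper

     char *p = __builtin_strchr (s, 0);

   while a constant string argument is resolved entirely at compile
   time.  */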
10356 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10357 to the call, and TYPE is its return type.
10359 Return NULL_TREE if no simplification was possible, otherwise return the
10360 simplified form of the call as a tree.
10362 The simplified form may be a constant or other expression which
10363 computes the same value, but in a more efficient manner (including
10364 calls to other builtin functions).
10366 The call may contain arguments which need to be evaluated, but
10367 which are not useful to determine the result of the call. In
10368 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10369 COMPOUND_EXPR will be an argument which must be evaluated.
10370 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10371 COMPOUND_EXPR in the chain will contain the tree for the simplified
10372 form of the builtin function call. */
10374 static tree
10375 fold_builtin_strpbrk (tree s1, tree s2, tree type)
10377 if (!validate_arg (s1, POINTER_TYPE)
10378 || !validate_arg (s2, POINTER_TYPE))
10379 return NULL_TREE;
10380 else
10382 tree fn;
10383 const char *p1, *p2;
10385 p2 = c_getstr (s2);
10386 if (p2 == NULL)
10387 return NULL_TREE;
10389 p1 = c_getstr (s1);
10390 if (p1 != NULL)
10392 const char *r = strpbrk (p1, p2);
10393 tree tem;
10395 if (r == NULL)
10396 return build_int_cst (TREE_TYPE (s1), 0);
10398 /* Return an offset into the constant string argument. */
10399 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10400 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10401 return fold_convert (type, tem);
10404 if (p2[0] == '\0')
10405 /* strpbrk(x, "") == NULL.
10406 Evaluate and ignore s1 in case it had side-effects. */
10407 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
10409 if (p2[1] != '\0')
10410 return NULL_TREE; /* Really call strpbrk. */
10412 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10413 if (!fn)
10414 return NULL_TREE;
10416 /* New argument list transforming strpbrk(s1, s2) to
10417 strchr(s1, s2[0]). */
10418 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
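/* Editorial example, not part of the original source: the two cheap
   cases handled above are

     char *a = __builtin_strpbrk (s, "");    folds to a null pointer, s still evaluated
     char *b = __builtin_strpbrk (s, ",");   becomes __builtin_strchr (s, ',')

   while accept sets of two or more characters really do call
   strpbrk.  */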
10422 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10423 to the call.
10425 Return NULL_TREE if no simplification was possible, otherwise return the
10426 simplified form of the call as a tree.
10428 The simplified form may be a constant or other expression which
10429 computes the same value, but in a more efficient manner (including
10430 calls to other builtin functions).
10432 The call may contain arguments which need to be evaluated, but
10433 which are not useful to determine the result of the call. In
10434 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10435 COMPOUND_EXPR will be an argument which must be evaluated.
10436 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10437 COMPOUND_EXPR in the chain will contain the tree for the simplified
10438 form of the builtin function call. */
10440 static tree
10441 fold_builtin_strcat (tree dst, tree src)
10443 if (!validate_arg (dst, POINTER_TYPE)
10444 || !validate_arg (src, POINTER_TYPE))
10445 return NULL_TREE;
10446 else
10448 const char *p = c_getstr (src);
10450 /* If the string length is zero, return the dst parameter. */
10451 if (p && *p == '\0')
10452 return dst;
10454 return NULL_TREE;
10458 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10459 arguments to the call.
10461 Return NULL_TREE if no simplification was possible, otherwise return the
10462 simplified form of the call as a tree.
10464 The simplified form may be a constant or other expression which
10465 computes the same value, but in a more efficient manner (including
10466 calls to other builtin functions).
10468 The call may contain arguments which need to be evaluated, but
10469 which are not useful to determine the result of the call. In
10470 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10471 COMPOUND_EXPR will be an argument which must be evaluated.
10472 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10473 COMPOUND_EXPR in the chain will contain the tree for the simplified
10474 form of the builtin function call. */
10476 static tree
10477 fold_builtin_strncat (tree dst, tree src, tree len)
10479 if (!validate_arg (dst, POINTER_TYPE)
10480 || !validate_arg (src, POINTER_TYPE)
10481 || !validate_arg (len, INTEGER_TYPE))
10482 return NULL_TREE;
10483 else
10485 const char *p = c_getstr (src);
10487 /* If the requested length is zero, or the src parameter string
10488 length is zero, return the dst parameter. */
10489 if (integer_zerop (len) || (p && *p == '\0'))
10490 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
10492 /* If the requested len is greater than or equal to the string
10493 length, call strcat. */
10494 if (TREE_CODE (len) == INTEGER_CST && p
10495 && compare_tree_int (len, strlen (p)) >= 0)
10497 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
10499 /* If the replacement _DECL isn't initialized, don't do the
10500 transformation. */
10501 if (!fn)
10502 return NULL_TREE;
10504 return build_call_expr (fn, 2, dst, src);
10506 return NULL_TREE;
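/* Editorial example, not part of the original source: when the bound
   cannot truncate the source string,

     strncat (dst, "ab", 16);

   is folded above into strcat (dst, "ab"), while a zero bound or an
   empty source reduces to DST with the other operands still evaluated
   for side effects.  */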
10510 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10511 to the call.
10513 Return NULL_TREE if no simplification was possible, otherwise return the
10514 simplified form of the call as a tree.
10516 The simplified form may be a constant or other expression which
10517 computes the same value, but in a more efficient manner (including
10518 calls to other builtin functions).
10520 The call may contain arguments which need to be evaluated, but
10521 which are not useful to determine the result of the call. In
10522 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10523 COMPOUND_EXPR will be an argument which must be evaluated.
10524 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10525 COMPOUND_EXPR in the chain will contain the tree for the simplified
10526 form of the builtin function call. */
10528 static tree
10529 fold_builtin_strspn (tree s1, tree s2)
10531 if (!validate_arg (s1, POINTER_TYPE)
10532 || !validate_arg (s2, POINTER_TYPE))
10533 return NULL_TREE;
10534 else
10536 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10538 /* If both arguments are constants, evaluate at compile-time. */
10539 if (p1 && p2)
10541 const size_t r = strspn (p1, p2);
10542 return size_int (r);
10545 /* If either argument is "", the result is 0. */
10546 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10547 /* Evaluate and ignore both arguments in case either one has
10548 side-effects. */
10549 return omit_two_operands (integer_type_node, integer_zero_node,
10550 s1, s2);
10551 return NULL_TREE;
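/* Editorial example, not part of the original source: with two literal
   arguments the span is computed on the host, e.g.

     size_t n = __builtin_strspn ("abcde", "abc");   folds to 3

   and an empty string on either side folds to 0 while keeping both
   operands for their side effects.  */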
10555 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10556 to the call.
10558 Return NULL_TREE if no simplification was possible, otherwise return the
10559 simplified form of the call as a tree.
10561 The simplified form may be a constant or other expression which
10562 computes the same value, but in a more efficient manner (including
10563 calls to other builtin functions).
10565 The call may contain arguments which need to be evaluated, but
10566 which are not useful to determine the result of the call. In
10567 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10568 COMPOUND_EXPR will be an argument which must be evaluated.
10569 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10570 COMPOUND_EXPR in the chain will contain the tree for the simplified
10571 form of the builtin function call. */
10573 static tree
10574 fold_builtin_strcspn (tree s1, tree s2)
10576 if (!validate_arg (s1, POINTER_TYPE)
10577 || !validate_arg (s2, POINTER_TYPE))
10578 return NULL_TREE;
10579 else
10581 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10583 /* If both arguments are constants, evaluate at compile-time. */
10584 if (p1 && p2)
10586 const size_t r = strcspn (p1, p2);
10587 return size_int (r);
10590 /* If the first argument is "", the result is 0. */
10591 if (p1 && *p1 == '\0')
10593 /* Evaluate and ignore argument s2 in case it has
10594 side-effects. */
10595 return omit_one_operand (integer_type_node,
10596 integer_zero_node, s2);
10599 /* If the second argument is "", return __builtin_strlen(s1). */
10600 if (p2 && *p2 == '\0')
10602 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
10604 /* If the replacement _DECL isn't initialized, don't do the
10605 transformation. */
10606 if (!fn)
10607 return NULL_TREE;
10609 return build_call_expr (fn, 1, s1);
10611 return NULL_TREE;
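/* Editorial example, not part of the original source:

     size_t a = __builtin_strcspn ("hello", "l");   folds to 2
     size_t b = __builtin_strcspn (s, "");          becomes __builtin_strlen (s)

   the second rewrite assuming the strlen decl is available; otherwise
   the call is left alone.  */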
10615 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
10616 to the call. IGNORE is true if the value returned
10617 by the builtin will be ignored. UNLOCKED is true if this is
10618 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
10619 the known length of the string. Return NULL_TREE if no simplification
10620 was possible. */
10622 tree
10623 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
10625 /* If we're using an unlocked function, assume the other unlocked
10626 functions exist explicitly. */
10627 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
10628 : implicit_built_in_decls[BUILT_IN_FPUTC];
10629 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
10630 : implicit_built_in_decls[BUILT_IN_FWRITE];
10632 /* If the return value is used, don't do the transformation. */
10633 if (!ignore)
10634 return NULL_TREE;
10636 /* Verify the arguments in the original call. */
10637 if (!validate_arg (arg0, POINTER_TYPE)
10638 || !validate_arg (arg1, POINTER_TYPE))
10639 return NULL_TREE;
10641 if (! len)
10642 len = c_strlen (arg0, 0);
10644 /* Get the length of the string passed to fputs. If the length
10645 can't be determined, punt. */
10646 if (!len
10647 || TREE_CODE (len) != INTEGER_CST)
10648 return NULL_TREE;
10650 switch (compare_tree_int (len, 1))
10652 case -1: /* Length is 0; delete the call entirely. */
10653 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
10655 case 0: /* length is 1, call fputc. */
10657 const char *p = c_getstr (arg0);
10659 if (p != NULL)
10661 if (fn_fputc)
10662 return build_call_expr (fn_fputc, 2,
10663 build_int_cst (NULL_TREE, p[0]), arg1);
10664 else
10665 return NULL_TREE;
10668 /* FALLTHROUGH */
10669 case 1: /* length is greater than 1, call fwrite. */
10671 /* If optimizing for size, keep fputs. */
10672 if (optimize_size)
10673 return NULL_TREE;
10674 /* New argument list transforming fputs(string, stream) to
10675 fwrite(string, 1, len, stream). */
10676 if (fn_fwrite)
10677 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
10678 else
10679 return NULL_TREE;
10681 default:
10682 gcc_unreachable ();
10684 return NULL_TREE;
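/* Editorial example, not part of the original source: when the result
   is unused and the string length is known,

     fputs ("", stream);        is deleted, stream still evaluated
     fputs ("x", stream);       becomes fputc ('x', stream)
     fputs ("hello", stream);   becomes fwrite ("hello", 1, 5, stream)

   except that the fwrite form is skipped when optimizing for size.  */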
10687 /* Fold the next_arg or va_start call EXP. Returns true if an error was
10688 produced, false otherwise. This is done so that we don't output the error
10689 or warning twice or three times. */
10690 bool
10691 fold_builtin_next_arg (tree exp, bool va_start_p)
10693 tree fntype = TREE_TYPE (current_function_decl);
10694 int nargs = call_expr_nargs (exp);
10695 tree arg;
10697 if (TYPE_ARG_TYPES (fntype) == 0
10698 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
10699 == void_type_node))
10701 error ("%<va_start%> used in function with fixed args");
10702 return true;
10705 if (va_start_p)
10707 if (va_start_p && (nargs != 2))
10709 error ("wrong number of arguments to function %<va_start%>");
10710 return true;
10712 arg = CALL_EXPR_ARG (exp, 1);
10714 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10715 once we have checked the arguments and, if needed, issued a warning. */
10716 else
10718 if (nargs == 0)
10720 /* Evidently an out of date version of <stdarg.h>; can't validate
10721 va_start's second argument, but can still work as intended. */
10722 warning (0, "%<__builtin_next_arg%> called without an argument");
10723 return true;
10725 else if (nargs > 1)
10727 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10728 return true;
10730 arg = CALL_EXPR_ARG (exp, 0);
10733 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10734 or __builtin_next_arg (0) the first time we see it, after checking
10735 the arguments and if needed issuing a warning. */
10736 if (!integer_zerop (arg))
10738 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10740 /* Strip off all nops for the sake of the comparison. This
10741 is not quite the same as STRIP_NOPS. It does more.
10742 We must also strip off INDIRECT_EXPR for C++ reference
10743 parameters. */
10744 while (TREE_CODE (arg) == NOP_EXPR
10745 || TREE_CODE (arg) == CONVERT_EXPR
10746 || TREE_CODE (arg) == NON_LVALUE_EXPR
10747 || TREE_CODE (arg) == INDIRECT_REF)
10748 arg = TREE_OPERAND (arg, 0);
10749 if (arg != last_parm)
10751 /* FIXME: Sometimes with the tree optimizers we can end up with
10752 something other than the last argument even though the user used
10753 the last argument. We just warn and set the arg to be the last
10754 argument so that we will not get wrong code because of
10755 it. */
10756 warning (0, "second parameter of %<va_start%> not last named argument");
10758 /* We want to verify the second parameter just once before the tree
10759 optimizers are run and then avoid keeping it in the tree,
10760 as otherwise we could warn even for correct code like:
10761 void foo (int i, ...)
10762 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10763 if (va_start_p)
10764 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10765 else
10766 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10768 return false;
10772 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
10773 ORIG may be null if this is a 2-argument call. We don't attempt to
10774 simplify calls with more than 3 arguments.
10776 Return NULL_TREE if no simplification was possible, otherwise return the
10777 simplified form of the call as a tree. If IGNORED is true, it means that
10778 the caller does not use the returned value of the function. */
10780 static tree
10781 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
10783 tree call, retval;
10784 const char *fmt_str = NULL;
10786 /* Verify the required arguments in the original call. We deal with two
10787 types of sprintf() calls: 'sprintf (str, fmt)' and
10788 'sprintf (dest, "%s", orig)'. */
10789 if (!validate_arg (dest, POINTER_TYPE)
10790 || !validate_arg (fmt, POINTER_TYPE))
10791 return NULL_TREE;
10792 if (orig && !validate_arg (orig, POINTER_TYPE))
10793 return NULL_TREE;
10795 /* Check whether the format is a literal string constant. */
10796 fmt_str = c_getstr (fmt);
10797 if (fmt_str == NULL)
10798 return NULL_TREE;
10800 call = NULL_TREE;
10801 retval = NULL_TREE;
10803 if (!init_target_chars ())
10804 return NULL_TREE;
10806 /* If the format doesn't contain % args or %%, use strcpy. */
10807 if (strchr (fmt_str, target_percent) == NULL)
10809 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10811 if (!fn)
10812 return NULL_TREE;
10814 /* Don't optimize sprintf (buf, "abc", ptr++). */
10815 if (orig)
10816 return NULL_TREE;
10818 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
10819 'format' is known to contain no % formats. */
10820 call = build_call_expr (fn, 2, dest, fmt);
10821 if (!ignored)
10822 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
10825 /* If the format is "%s", use strcpy if the result isn't used. */
10826 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
10828 tree fn;
10829 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10831 if (!fn)
10832 return NULL_TREE;
10834 /* Don't crash on sprintf (str1, "%s"). */
10835 if (!orig)
10836 return NULL_TREE;
10838 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
10839 if (!ignored)
10841 retval = c_strlen (orig, 1);
10842 if (!retval || TREE_CODE (retval) != INTEGER_CST)
10843 return NULL_TREE;
10845 call = build_call_expr (fn, 2, dest, orig);
10848 if (call && retval)
10850 retval = fold_convert
10851 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
10852 retval);
10853 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
10855 else
10856 return call;
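/* Editorial example, not part of the original source: the two shapes
   recognized above are

     sprintf (buf, "hello");     becomes strcpy (buf, "hello"), value 5
     sprintf (buf, "%s", str);   becomes strcpy (buf, str)

   where the return value is only synthesized when the caller uses it,
   and for the "%s" form only when the source length is a known
   constant.  */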
10859 /* Expand a call EXP to __builtin_object_size. */
10861 static rtx
10862 expand_builtin_object_size (tree exp)
10864 tree ost;
10865 int object_size_type;
10866 tree fndecl = get_callee_fndecl (exp);
10867 location_t locus = EXPR_LOCATION (exp);
10869 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10871 error ("%Hfirst argument of %D must be a pointer, second integer constant",
10872 &locus, fndecl);
10873 expand_builtin_trap ();
10874 return const0_rtx;
10877 ost = CALL_EXPR_ARG (exp, 1);
10878 STRIP_NOPS (ost);
10880 if (TREE_CODE (ost) != INTEGER_CST
10881 || tree_int_cst_sgn (ost) < 0
10882 || compare_tree_int (ost, 3) > 0)
10884 error ("%Hlast argument of %D is not integer constant between 0 and 3",
10885 &locus, fndecl);
10886 expand_builtin_trap ();
10887 return const0_rtx;
10890 object_size_type = tree_low_cst (ost, 0);
10892 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10895 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10896 FCODE is the BUILT_IN_* to use.
10897 Return NULL_RTX if we failed; the caller should emit a normal call,
10898 otherwise try to get the result in TARGET, if convenient (and in
10899 mode MODE if that's convenient). */
10901 static rtx
10902 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
10903 enum built_in_function fcode)
10905 tree dest, src, len, size;
10907 if (!validate_arglist (exp,
10908 POINTER_TYPE,
10909 fcode == BUILT_IN_MEMSET_CHK
10910 ? INTEGER_TYPE : POINTER_TYPE,
10911 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10912 return NULL_RTX;
10914 dest = CALL_EXPR_ARG (exp, 0);
10915 src = CALL_EXPR_ARG (exp, 1);
10916 len = CALL_EXPR_ARG (exp, 2);
10917 size = CALL_EXPR_ARG (exp, 3);
10919 if (! host_integerp (size, 1))
10920 return NULL_RTX;
10922 if (host_integerp (len, 1) || integer_all_onesp (size))
10924 tree fn;
10926 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
10928 location_t locus = EXPR_LOCATION (exp);
10929 warning (0, "%Hcall to %D will always overflow destination buffer",
10930 &locus, get_callee_fndecl (exp));
10931 return NULL_RTX;
10934 fn = NULL_TREE;
10935 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10936 mem{cpy,pcpy,move,set} is available. */
10937 switch (fcode)
10939 case BUILT_IN_MEMCPY_CHK:
10940 fn = built_in_decls[BUILT_IN_MEMCPY];
10941 break;
10942 case BUILT_IN_MEMPCPY_CHK:
10943 fn = built_in_decls[BUILT_IN_MEMPCPY];
10944 break;
10945 case BUILT_IN_MEMMOVE_CHK:
10946 fn = built_in_decls[BUILT_IN_MEMMOVE];
10947 break;
10948 case BUILT_IN_MEMSET_CHK:
10949 fn = built_in_decls[BUILT_IN_MEMSET];
10950 break;
10951 default:
10952 break;
10955 if (! fn)
10956 return NULL_RTX;
10958 fn = build_call_expr (fn, 3, dest, src, len);
10959 if (TREE_CODE (fn) == CALL_EXPR)
10960 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10961 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10963 else if (fcode == BUILT_IN_MEMSET_CHK)
10964 return NULL_RTX;
10965 else
10967 unsigned int dest_align
10968 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
10970 /* If DEST is not a pointer type, call the normal function. */
10971 if (dest_align == 0)
10972 return NULL_RTX;
10974 /* If SRC and DEST are the same (and not volatile), do nothing. */
10975 if (operand_equal_p (src, dest, 0))
10977 tree expr;
10979 if (fcode != BUILT_IN_MEMPCPY_CHK)
10981 /* Evaluate and ignore LEN in case it has side-effects. */
10982 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10983 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10986 len = fold_convert (TREE_TYPE (dest), len);
10987 expr = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
10988 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10991 /* __memmove_chk special case. */
10992 if (fcode == BUILT_IN_MEMMOVE_CHK)
10994 unsigned int src_align
10995 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
10997 if (src_align == 0)
10998 return NULL_RTX;
11000 /* If src is categorized for a readonly section we can use
11001 normal __memcpy_chk. */
11002 if (readonly_data_expr (src))
11004 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11005 if (!fn)
11006 return NULL_RTX;
11007 fn = build_call_expr (fn, 4, dest, src, len, size);
11008 if (TREE_CODE (fn) == CALL_EXPR)
11009 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11010 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11013 return NULL_RTX;
11017 /* Emit warning if a buffer overflow is detected at compile time. */
11019 static void
11020 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11022 int is_strlen = 0;
11023 tree len, size;
11024 location_t locus;
11026 switch (fcode)
11028 case BUILT_IN_STRCPY_CHK:
11029 case BUILT_IN_STPCPY_CHK:
11030 /* For __strcat_chk the warning will be emitted only if overflowing
11031 by at least strlen (dest) + 1 bytes. */
11032 case BUILT_IN_STRCAT_CHK:
11033 len = CALL_EXPR_ARG (exp, 1);
11034 size = CALL_EXPR_ARG (exp, 2);
11035 is_strlen = 1;
11036 break;
11037 case BUILT_IN_STRNCAT_CHK:
11038 case BUILT_IN_STRNCPY_CHK:
11039 len = CALL_EXPR_ARG (exp, 2);
11040 size = CALL_EXPR_ARG (exp, 3);
11041 break;
11042 case BUILT_IN_SNPRINTF_CHK:
11043 case BUILT_IN_VSNPRINTF_CHK:
11044 len = CALL_EXPR_ARG (exp, 1);
11045 size = CALL_EXPR_ARG (exp, 3);
11046 break;
11047 default:
11048 gcc_unreachable ();
11051 if (!len || !size)
11052 return;
11054 if (! host_integerp (size, 1) || integer_all_onesp (size))
11055 return;
11057 if (is_strlen)
11059 len = c_strlen (len, 1);
11060 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11061 return;
11063 else if (fcode == BUILT_IN_STRNCAT_CHK)
11065 tree src = CALL_EXPR_ARG (exp, 1);
11066 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11067 return;
11068 src = c_strlen (src, 1);
11069 if (! src || ! host_integerp (src, 1))
11071 locus = EXPR_LOCATION (exp);
11072 warning (0, "%Hcall to %D might overflow destination buffer",
11073 &locus, get_callee_fndecl (exp));
11074 return;
11076 else if (tree_int_cst_lt (src, size))
11077 return;
11079 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11080 return;
11082 locus = EXPR_LOCATION (exp);
11083 warning (0, "%Hcall to %D will always overflow destination buffer",
11084 &locus, get_callee_fndecl (exp));
11087 /* Emit warning if a buffer overflow is detected at compile time
11088 in __sprintf_chk/__vsprintf_chk calls. */
11090 static void
11091 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11093 tree dest, size, len, fmt, flag;
11094 const char *fmt_str;
11095 int nargs = call_expr_nargs (exp);
11097 /* Verify the required arguments in the original call. */
11099 if (nargs < 4)
11100 return;
11101 dest = CALL_EXPR_ARG (exp, 0);
11102 flag = CALL_EXPR_ARG (exp, 1);
11103 size = CALL_EXPR_ARG (exp, 2);
11104 fmt = CALL_EXPR_ARG (exp, 3);
11106 if (! host_integerp (size, 1) || integer_all_onesp (size))
11107 return;
11109 /* Check whether the format is a literal string constant. */
11110 fmt_str = c_getstr (fmt);
11111 if (fmt_str == NULL)
11112 return;
11114 if (!init_target_chars ())
11115 return;
11117 /* If the format doesn't contain % args or %%, we know its size. */
11118 if (strchr (fmt_str, target_percent) == 0)
11119 len = build_int_cstu (size_type_node, strlen (fmt_str));
11120 /* If the format is "%s" and first ... argument is a string literal,
11121 we know it too. */
11122 else if (fcode == BUILT_IN_SPRINTF_CHK
11123 && strcmp (fmt_str, target_percent_s) == 0)
11125 tree arg;
11127 if (nargs < 5)
11128 return;
11129 arg = CALL_EXPR_ARG (exp, 4);
11130 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11131 return;
11133 len = c_strlen (arg, 1);
11134 if (!len || ! host_integerp (len, 1))
11135 return;
11137 else
11138 return;
11140 if (! tree_int_cst_lt (len, size))
11142 location_t locus = EXPR_LOCATION (exp);
11143 warning (0, "%Hcall to %D will always overflow destination buffer",
11144 &locus, get_callee_fndecl (exp));
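/* Illustrative, hypothetical test case (not GCC source): a __sprintf_chk
   call whose output length is known from the literal format string, the
   situation the warning just above targets.  Names are invented for this
   sketch.  */
static char example_tiny[4];

void
example_sprintf_chk_warning (void)
{
  /* The format contains no '%', so the output is exactly 8 characters
     (plus the terminating NUL), which cannot fit in a 4-byte object.  */
  __builtin___sprintf_chk (example_tiny, 0,
                           __builtin_object_size (example_tiny, 0),
                           "8 bytes!");
}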
11148 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11149 if possible. */
11151 tree
11152 fold_builtin_object_size (tree ptr, tree ost)
11154 tree ret = NULL_TREE;
11155 int object_size_type;
11157 if (!validate_arg (ptr, POINTER_TYPE)
11158 || !validate_arg (ost, INTEGER_TYPE))
11159 return NULL_TREE;
11161 STRIP_NOPS (ost);
11163 if (TREE_CODE (ost) != INTEGER_CST
11164 || tree_int_cst_sgn (ost) < 0
11165 || compare_tree_int (ost, 3) > 0)
11166 return NULL_TREE;
11168 object_size_type = tree_low_cst (ost, 0);
11170 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11171 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11172 and (size_t) 0 for types 2 and 3. */
11173 if (TREE_SIDE_EFFECTS (ptr))
11174 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11176 if (TREE_CODE (ptr) == ADDR_EXPR)
11177 ret = build_int_cstu (size_type_node,
11178 compute_builtin_object_size (ptr, object_size_type));
11180 else if (TREE_CODE (ptr) == SSA_NAME)
11182 unsigned HOST_WIDE_INT bytes;
11184 /* If object size is not known yet, delay folding until
11185 later. Maybe subsequent passes will help determine
11186 it. */

11187 bytes = compute_builtin_object_size (ptr, object_size_type);
11188 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11189 ? -1 : 0))
11190 ret = build_int_cstu (size_type_node, bytes);
11193 if (ret)
11195 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11196 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11197 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11198 ret = NULL_TREE;
11201 return ret;
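/* Illustrative, hypothetical test case (not GCC source): what the folder
   above computes for a simple __builtin_object_size use.  The values in
   the comments assume a plain global array, where the remaining size is
   known exactly for every object-size type.  Names are invented for this
   sketch.  */
static char example_obj[16];

void
example_object_size (void)
{
  /* &example_obj[4] is an ADDR_EXPR, so both calls fold to the constant
     12 (the bytes remaining after the 4-byte offset).  */
  __SIZE_TYPE__ max_est = __builtin_object_size (&example_obj[4], 0);
  __SIZE_TYPE__ min_est = __builtin_object_size (&example_obj[4], 2);
  (void) max_est;
  (void) min_est;
}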
11204 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11205 DEST, SRC, LEN, and SIZE are the arguments to the call.
11206 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
11207 code of the builtin. If MAXLEN is not NULL, it is the maximum length
11208 passed as the third argument. */
11210 tree
11211 fold_builtin_memory_chk (tree fndecl,
11212 tree dest, tree src, tree len, tree size,
11213 tree maxlen, bool ignore,
11214 enum built_in_function fcode)
11216 tree fn;
11218 if (!validate_arg (dest, POINTER_TYPE)
11219 || !validate_arg (src,
11220 (fcode == BUILT_IN_MEMSET_CHK
11221 ? INTEGER_TYPE : POINTER_TYPE))
11222 || !validate_arg (len, INTEGER_TYPE)
11223 || !validate_arg (size, INTEGER_TYPE))
11224 return NULL_TREE;
11226 /* If SRC and DEST are the same (and not volatile), return DEST
11227 (resp. DEST+LEN for __mempcpy_chk). */
11228 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11230 if (fcode != BUILT_IN_MEMPCPY_CHK)
11231 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11232 else
11234 tree temp = fold_convert (TREE_TYPE (dest), len);
11235 temp = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, temp);
11236 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11240 if (! host_integerp (size, 1))
11241 return NULL_TREE;
11243 if (! integer_all_onesp (size))
11245 if (! host_integerp (len, 1))
11247 /* If LEN is not constant, try MAXLEN too.
11248 For MAXLEN only allow optimizing into non-_ocs function
11249 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11250 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11252 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11254 /* (void) __mempcpy_chk () can be optimized into
11255 (void) __memcpy_chk (). */
11256 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11257 if (!fn)
11258 return NULL_TREE;
11260 return build_call_expr (fn, 4, dest, src, len, size);
11262 return NULL_TREE;
11265 else
11266 maxlen = len;
11268 if (tree_int_cst_lt (size, maxlen))
11269 return NULL_TREE;
11272 fn = NULL_TREE;
11273 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11274 mem{cpy,pcpy,move,set} is available. */
11275 switch (fcode)
11277 case BUILT_IN_MEMCPY_CHK:
11278 fn = built_in_decls[BUILT_IN_MEMCPY];
11279 break;
11280 case BUILT_IN_MEMPCPY_CHK:
11281 fn = built_in_decls[BUILT_IN_MEMPCPY];
11282 break;
11283 case BUILT_IN_MEMMOVE_CHK:
11284 fn = built_in_decls[BUILT_IN_MEMMOVE];
11285 break;
11286 case BUILT_IN_MEMSET_CHK:
11287 fn = built_in_decls[BUILT_IN_MEMSET];
11288 break;
11289 default:
11290 break;
11293 if (!fn)
11294 return NULL_TREE;
11296 return build_call_expr (fn, 3, dest, src, len);
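/* Illustrative, hypothetical test case (not GCC source): the folding above
   at the source level.  When the copy length is a constant that fits in
   the known object size, __memcpy_chk degenerates to plain memcpy; the
   same happens when the object size is (size_t) -1, i.e. unknown.  Names
   are invented for this sketch.  */
static char example_dst[32];
static const char example_src[8] = "abcdefg";

void
example_memcpy_chk_fold (void)
{
  /* len (8) <= object size (32), so this may be folded to
     memcpy (example_dst, example_src, 8).  */
  __builtin___memcpy_chk (example_dst, example_src, sizeof example_src,
                          __builtin_object_size (example_dst, 0));
}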
11299 /* Fold a call to the __st[rp]cpy_chk builtin.
11300 DEST, SRC, and SIZE are the arguments to the call.
11301 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
11302 code of the builtin. If MAXLEN is not NULL, it is the maximum length of
11303 the strings passed as the second argument. */
11305 tree
11306 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
11307 tree maxlen, bool ignore,
11308 enum built_in_function fcode)
11310 tree len, fn;
11312 if (!validate_arg (dest, POINTER_TYPE)
11313 || !validate_arg (src, POINTER_TYPE)
11314 || !validate_arg (size, INTEGER_TYPE))
11315 return NULL_TREE;
11317 /* If SRC and DEST are the same (and not volatile), return DEST. */
11318 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
11319 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
11321 if (! host_integerp (size, 1))
11322 return NULL_TREE;
11324 if (! integer_all_onesp (size))
11326 len = c_strlen (src, 1);
11327 if (! len || ! host_integerp (len, 1))
11329 /* If LEN is not constant, try MAXLEN too.
11330 For MAXLEN only allow optimizing into non-_ocs function
11331 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11332 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11334 if (fcode == BUILT_IN_STPCPY_CHK)
11336 if (! ignore)
11337 return NULL_TREE;
11339 /* If return value of __stpcpy_chk is ignored,
11340 optimize into __strcpy_chk. */
11341 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
11342 if (!fn)
11343 return NULL_TREE;
11345 return build_call_expr (fn, 3, dest, src, size);
11348 if (! len || TREE_SIDE_EFFECTS (len))
11349 return NULL_TREE;
11351 /* If c_strlen returned something, but not a constant,
11352 transform __strcpy_chk into __memcpy_chk. */
11353 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11354 if (!fn)
11355 return NULL_TREE;
11357 len = size_binop (PLUS_EXPR, len, ssize_int (1));
11358 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
11359 build_call_expr (fn, 4,
11360 dest, src, len, size));
11363 else
11364 maxlen = len;
11366 if (! tree_int_cst_lt (maxlen, size))
11367 return NULL_TREE;
11370 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
11371 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
11372 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
11373 if (!fn)
11374 return NULL_TREE;
11376 return build_call_expr (fn, 2, dest, src);
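/* Illustrative, hypothetical test case (not GCC source): the two main
   outcomes of fold_builtin_stxcpy_chk above, written as user code.  Names
   are invented for this sketch.  */
static char example_dest[16];

void
example_strcpy_chk_fold (const char *unknown)
{
  /* Constant source shorter than the object: folded to plain strcpy.  */
  __builtin___strcpy_chk (example_dest, "short",
                          __builtin_object_size (example_dest, 0));

  /* Non-constant source: no strlen is known, so no folding into strcpy
     is possible and the checking call is kept (or becomes __memcpy_chk
     when c_strlen finds a non-constant length).  */
  __builtin___strcpy_chk (example_dest, unknown,
                          __builtin_object_size (example_dest, 0));
}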
11379 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11380 are the arguments to the call. If MAXLEN is not NULL, it is maximum
11381 length passed as third argument. */
11383 tree
11384 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
11385 tree maxlen)
11387 tree fn;
11389 if (!validate_arg (dest, POINTER_TYPE)
11390 || !validate_arg (src, POINTER_TYPE)
11391 || !validate_arg (len, INTEGER_TYPE)
11392 || !validate_arg (size, INTEGER_TYPE))
11393 return NULL_TREE;
11395 if (! host_integerp (size, 1))
11396 return NULL_TREE;
11398 if (! integer_all_onesp (size))
11400 if (! host_integerp (len, 1))
11402 /* If LEN is not constant, try MAXLEN too.
11403 For MAXLEN only allow optimizing into non-_ocs function
11404 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11405 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11406 return NULL_TREE;
11408 else
11409 maxlen = len;
11411 if (tree_int_cst_lt (size, maxlen))
11412 return NULL_TREE;
11415 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11416 fn = built_in_decls[BUILT_IN_STRNCPY];
11417 if (!fn)
11418 return NULL_TREE;
11420 return build_call_expr (fn, 3, dest, src, len);
11423 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11424 are the arguments to the call. */
11426 static tree
11427 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
11429 tree fn;
11430 const char *p;
11432 if (!validate_arg (dest, POINTER_TYPE)
11433 || !validate_arg (src, POINTER_TYPE)
11434 || !validate_arg (size, INTEGER_TYPE))
11435 return NULL_TREE;
11437 p = c_getstr (src);
11438 /* If the SRC parameter is "", return DEST. */
11439 if (p && *p == '\0')
11440 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11442 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
11443 return NULL_TREE;
11445 /* If __builtin_strcat_chk is used, assume strcat is available. */
11446 fn = built_in_decls[BUILT_IN_STRCAT];
11447 if (!fn)
11448 return NULL_TREE;
11450 return build_call_expr (fn, 2, dest, src);
11453 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11454 LEN, and SIZE. */
11456 static tree
11457 fold_builtin_strncat_chk (tree fndecl,
11458 tree dest, tree src, tree len, tree size)
11460 tree fn;
11461 const char *p;
11463 if (!validate_arg (dest, POINTER_TYPE)
11464 || !validate_arg (src, POINTER_TYPE)
11465 || !validate_arg (len, INTEGER_TYPE)
11466 || !validate_arg (size, INTEGER_TYPE)
11467 return NULL_TREE;
11469 p = c_getstr (src);
11470 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11471 if (p && *p == '\0')
11472 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11473 else if (integer_zerop (len))
11474 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11476 if (! host_integerp (size, 1))
11477 return NULL_TREE;
11479 if (! integer_all_onesp (size))
11481 tree src_len = c_strlen (src, 1);
11482 if (src_len
11483 && host_integerp (src_len, 1)
11484 && host_integerp (len, 1)
11485 && ! tree_int_cst_lt (len, src_len))
11487 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
11488 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
11489 if (!fn)
11490 return NULL_TREE;
11492 return build_call_expr (fn, 3, dest, src, size);
11494 return NULL_TREE;
11497 /* If __builtin_strncat_chk is used, assume strncat is available. */
11498 fn = built_in_decls[BUILT_IN_STRNCAT];
11499 if (!fn)
11500 return NULL_TREE;
11502 return build_call_expr (fn, 3, dest, src, len);
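/* Illustrative, hypothetical test case (not GCC source): the strncat_chk
   folding above.  With a constant source and LEN >= strlen (SRC), the
   call is rewritten as __strcat_chk; with no known object size it would
   instead fall through to plain strncat.  Names are invented for this
   sketch.  */
static char example_cat[32] = "prefix ";

void
example_strncat_chk_fold (void)
{
  /* strlen ("suffix") == 6 <= 16, so this may become
     __strcat_chk (example_cat, "suffix", <object size>).  */
  __builtin___strncat_chk (example_cat, "suffix", 16,
                           __builtin_object_size (example_cat, 0));
}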
11505 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
11506 a normal call should be emitted rather than expanding the function
11507 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
11509 static tree
11510 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
11512 tree dest, size, len, fn, fmt, flag;
11513 const char *fmt_str;
11514 int nargs = call_expr_nargs (exp);
11516 /* Verify the required arguments in the original call. */
11517 if (nargs < 4)
11518 return NULL_TREE;
11519 dest = CALL_EXPR_ARG (exp, 0);
11520 if (!validate_arg (dest, POINTER_TYPE))
11521 return NULL_TREE;
11522 flag = CALL_EXPR_ARG (exp, 1);
11523 if (!validate_arg (flag, INTEGER_TYPE))
11524 return NULL_TREE;
11525 size = CALL_EXPR_ARG (exp, 2);
11526 if (!validate_arg (size, INTEGER_TYPE))
11527 return NULL_TREE;
11528 fmt = CALL_EXPR_ARG (exp, 3);
11529 if (!validate_arg (fmt, POINTER_TYPE))
11530 return NULL_TREE;
11532 if (! host_integerp (size, 1))
11533 return NULL_TREE;
11535 len = NULL_TREE;
11537 if (!init_target_chars ())
11538 return NULL_TREE;
11540 /* Check whether the format is a literal string constant. */
11541 fmt_str = c_getstr (fmt);
11542 if (fmt_str != NULL)
11544 /* If the format doesn't contain % args or %%, we know the size. */
11545 if (strchr (fmt_str, target_percent) == 0)
11547 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
11548 len = build_int_cstu (size_type_node, strlen (fmt_str));
11550 /* If the format is "%s" and first ... argument is a string literal,
11551 we know the size too. */
11552 else if (fcode == BUILT_IN_SPRINTF_CHK
11553 && strcmp (fmt_str, target_percent_s) == 0)
11555 tree arg;
11557 if (nargs == 5)
11559 arg = CALL_EXPR_ARG (exp, 4);
11560 if (validate_arg (arg, POINTER_TYPE))
11562 len = c_strlen (arg, 1);
11563 if (! len || ! host_integerp (len, 1))
11564 len = NULL_TREE;
11570 if (! integer_all_onesp (size))
11572 if (! len || ! tree_int_cst_lt (len, size))
11573 return NULL_TREE;
11576 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
11577 or if format doesn't contain % chars or is "%s". */
11578 if (! integer_zerop (flag))
11580 if (fmt_str == NULL)
11581 return NULL_TREE;
11582 if (strchr (fmt_str, target_percent) != NULL
11583 && strcmp (fmt_str, target_percent_s))
11584 return NULL_TREE;
11587 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
11588 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
11589 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
11590 if (!fn)
11591 return NULL_TREE;
11593 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
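/* Illustrative, hypothetical test case (not GCC source): the folding above
   at the source level.  With flag 0 and a format containing no '%' (or
   exactly "%s" with a known-length argument), __sprintf_chk becomes a
   plain sprintf call, dropping the flag and size arguments.  Names are
   invented for this sketch.  */
static char example_out[64];

void
example_sprintf_chk_fold (void)
{
  /* No '%' in the format and 12 < 64: may be folded to
     sprintf (example_out, "hello, world").  */
  __builtin___sprintf_chk (example_out, 0,
                           __builtin_object_size (example_out, 0),
                           "hello, world");
}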
11596 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
11597 a normal call should be emitted rather than expanding the function
11598 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
11599 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum
11600 length passed as the second argument. */
11602 tree
11603 fold_builtin_snprintf_chk (tree exp, tree maxlen,
11604 enum built_in_function fcode)
11606 tree dest, size, len, fn, fmt, flag;
11607 const char *fmt_str;
11609 /* Verify the required arguments in the original call. */
11610 if (call_expr_nargs (exp) < 5)
11611 return NULL_TREE;
11612 dest = CALL_EXPR_ARG (exp, 0);
11613 if (!validate_arg (dest, POINTER_TYPE))
11614 return NULL_TREE;
11615 len = CALL_EXPR_ARG (exp, 1);
11616 if (!validate_arg (len, INTEGER_TYPE))
11617 return NULL_TREE;
11618 flag = CALL_EXPR_ARG (exp, 2);
11619 if (!validate_arg (flag, INTEGER_TYPE))
11620 return NULL_TREE;
11621 size = CALL_EXPR_ARG (exp, 3);
11622 if (!validate_arg (size, INTEGER_TYPE))
11623 return NULL_TREE;
11624 fmt = CALL_EXPR_ARG (exp, 4);
11625 if (!validate_arg (fmt, POINTER_TYPE))
11626 return NULL_TREE;
11628 if (! host_integerp (size, 1))
11629 return NULL_TREE;
11631 if (! integer_all_onesp (size))
11633 if (! host_integerp (len, 1))
11635 /* If LEN is not constant, try MAXLEN too.
11636 For MAXLEN only allow optimizing into non-_ocs function
11637 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11638 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11639 return NULL_TREE;
11641 else
11642 maxlen = len;
11644 if (tree_int_cst_lt (size, maxlen))
11645 return NULL_TREE;
11648 if (!init_target_chars ())
11649 return NULL_TREE;
11651 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
11652 or if format doesn't contain % chars or is "%s". */
11653 if (! integer_zerop (flag))
11655 fmt_str = c_getstr (fmt);
11656 if (fmt_str == NULL)
11657 return NULL_TREE;
11658 if (strchr (fmt_str, target_percent) != NULL
11659 && strcmp (fmt_str, target_percent_s))
11660 return NULL_TREE;
11663 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
11664 available. */
11665 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
11666 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
11667 if (!fn)
11668 return NULL_TREE;
11670 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
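/* Illustrative, hypothetical test case (not GCC source): __snprintf_chk is
   folded to snprintf when the maximum output length is known not to
   exceed the known object size and the checking flag is 0.  Names are
   invented for this sketch.  */
static char example_sn[32];

void
example_snprintf_chk_fold (int value)
{
  /* len (32) <= object size (32) and flag == 0: may be folded to
     snprintf (example_sn, 32, "%d", value).  */
  __builtin___snprintf_chk (example_sn, sizeof example_sn, 0,
                            __builtin_object_size (example_sn, 0),
                            "%d", value);
}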
11673 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
11674 FMT and ARG are the arguments to the call; we don't fold cases with
11675 more than 2 arguments, and ARG may be null if this is a 1-argument case.
11677 Return NULL_TREE if no simplification was possible, otherwise return the
11678 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11679 code of the function to be simplified. */
11681 static tree
11682 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
11683 enum built_in_function fcode)
11685 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
11686 const char *fmt_str = NULL;
11688 /* If the return value is used, don't do the transformation. */
11689 if (! ignore)
11690 return NULL_TREE;
11692 /* Verify the required arguments in the original call. */
11693 if (!validate_arg (fmt, POINTER_TYPE))
11694 return NULL_TREE;
11696 /* Check whether the format is a literal string constant. */
11697 fmt_str = c_getstr (fmt);
11698 if (fmt_str == NULL)
11699 return NULL_TREE;
11701 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
11703 /* If we're using an unlocked function, assume the other
11704 unlocked functions exist explicitly. */
11705 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
11706 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
11708 else
11710 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
11711 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
11714 if (!init_target_chars ())
11715 return NULL_TREE;
11717 if (strcmp (fmt_str, target_percent_s) == 0
11718 || strchr (fmt_str, target_percent) == NULL)
11720 const char *str;
11722 if (strcmp (fmt_str, target_percent_s) == 0)
11724 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11725 return NULL_TREE;
11727 if (!arg || !validate_arg (arg, POINTER_TYPE))
11728 return NULL_TREE;
11730 str = c_getstr (arg);
11731 if (str == NULL)
11732 return NULL_TREE;
11734 else
11736 /* The format specifier doesn't contain any '%' characters. */
11737 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
11738 && arg)
11739 return NULL_TREE;
11740 str = fmt_str;
11743 /* If the string was "", printf does nothing. */
11744 if (str[0] == '\0')
11745 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11747 /* If the string has length of 1, call putchar. */
11748 if (str[1] == '\0')
11750 /* Given printf("c"), (where c is any one character,)
11751 convert "c"[0] to an int and pass that to the replacement
11752 function. */
11753 newarg = build_int_cst (NULL_TREE, str[0]);
11754 if (fn_putchar)
11755 call = build_call_expr (fn_putchar, 1, newarg);
11757 else
11759 /* If the string was "string\n", call puts("string"). */
11760 size_t len = strlen (str);
11761 if ((unsigned char)str[len - 1] == target_newline)
11763 /* Create a NUL-terminated string that's one char shorter
11764 than the original, stripping off the trailing '\n'. */
11765 char *newstr = alloca (len);
11766 memcpy (newstr, str, len - 1);
11767 newstr[len - 1] = 0;
11769 newarg = build_string_literal (len, newstr);
11770 if (fn_puts)
11771 call = build_call_expr (fn_puts, 1, newarg);
11773 else
11774 /* We'd like to arrange to call fputs(string,stdout) here,
11775 but we need stdout and don't have a way to get it yet. */
11776 return NULL_TREE;
11780 /* The other optimizations can be done only on the non-va_list variants. */
11781 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11782 return NULL_TREE;
11784 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
11785 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
11787 if (!arg || !validate_arg (arg, POINTER_TYPE))
11788 return NULL_TREE;
11789 if (fn_puts)
11790 call = build_call_expr (fn_puts, 1, arg);
11793 /* If the format specifier was "%c", call __builtin_putchar(arg). */
11794 else if (strcmp (fmt_str, target_percent_c) == 0)
11796 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11797 return NULL_TREE;
11798 if (fn_putchar)
11799 call = build_call_expr (fn_putchar, 1, arg);
11802 if (!call)
11803 return NULL_TREE;
11805 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
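/* Illustrative, hypothetical test case (not GCC source): the classic
   printf foldings implemented above, seen from the user's side.  All
   three calls discard their return values, which is required for the
   transformation.  The function name is invented for this sketch.  */
#include <stdio.h>

void
example_printf_fold (const char *s)
{
  printf ("x");        /* single character: folded to putchar ('x')  */
  printf ("hello\n");  /* literal ending in '\n': folded to puts ("hello")  */
  printf ("%s\n", s);  /* "%s\n": folded to puts (s)  */
}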
11808 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
11809 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
11810 more than 3 arguments, and ARG may be null in the 2-argument case.
11812 Return NULL_TREE if no simplification was possible, otherwise return the
11813 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11814 code of the function to be simplified. */
11816 static tree
11817 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
11818 enum built_in_function fcode)
11820 tree fn_fputc, fn_fputs, call = NULL_TREE;
11821 const char *fmt_str = NULL;
11823 /* If the return value is used, don't do the transformation. */
11824 if (! ignore)
11825 return NULL_TREE;
11827 /* Verify the required arguments in the original call. */
11828 if (!validate_arg (fp, POINTER_TYPE))
11829 return NULL_TREE;
11830 if (!validate_arg (fmt, POINTER_TYPE))
11831 return NULL_TREE;
11833 /* Check whether the format is a literal string constant. */
11834 fmt_str = c_getstr (fmt);
11835 if (fmt_str == NULL)
11836 return NULL_TREE;
11838 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
11840 /* If we're using an unlocked function, assume the other
11841 unlocked functions exist explicitly. */
11842 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
11843 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
11845 else
11847 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
11848 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
11851 if (!init_target_chars ())
11852 return NULL_TREE;
11854 /* If the format doesn't contain % args or %%, use strcpy. */
11855 if (strchr (fmt_str, target_percent) == NULL)
11857 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
11858 && arg)
11859 return NULL_TREE;
11861 /* If the format specifier was "", fprintf does nothing. */
11862 if (fmt_str[0] == '\0')
11864 /* If FP has side-effects, just wait until gimplification is
11865 done. */
11866 if (TREE_SIDE_EFFECTS (fp))
11867 return NULL_TREE;
11869 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11872 /* When "string" doesn't contain %, replace all cases of
11873 fprintf (fp, string) with fputs (string, fp). The fputs
11874 builtin will take care of special cases like length == 1. */
11875 if (fn_fputs)
11876 call = build_call_expr (fn_fputs, 2, fmt, fp);
11879 /* The other optimizations can be done only on the non-va_list variants. */
11880 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
11881 return NULL_TREE;
11883 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
11884 else if (strcmp (fmt_str, target_percent_s) == 0)
11886 if (!arg || !validate_arg (arg, POINTER_TYPE))
11887 return NULL_TREE;
11888 if (fn_fputs)
11889 call = build_call_expr (fn_fputs, 2, arg, fp);
11892 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
11893 else if (strcmp (fmt_str, target_percent_c) == 0)
11895 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11896 return NULL_TREE;
11897 if (fn_fputc)
11898 call = build_call_expr (fn_fputc, 2, arg, fp);
11901 if (!call)
11902 return NULL_TREE;
11903 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
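/* Illustrative, hypothetical test case (not GCC source): the corresponding
   fprintf foldings, again with the return values ignored.  The function
   name is invented for this sketch.  */
#include <stdio.h>

void
example_fprintf_fold (FILE *fp, const char *s, int c)
{
  fprintf (fp, "no formats here");  /* folded to fputs ("no formats here", fp)  */
  fprintf (fp, "%s", s);            /* folded to fputs (s, fp)  */
  fprintf (fp, "%c", c);            /* folded to fputc (c, fp)  */
}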
11906 /* Initialize format string characters in the target charset. */
11908 static bool
11909 init_target_chars (void)
11911 static bool init;
11912 if (!init)
11914 target_newline = lang_hooks.to_target_charset ('\n');
11915 target_percent = lang_hooks.to_target_charset ('%');
11916 target_c = lang_hooks.to_target_charset ('c');
11917 target_s = lang_hooks.to_target_charset ('s');
11918 if (target_newline == 0 || target_percent == 0 || target_c == 0
11919 || target_s == 0)
11920 return false;
11922 target_percent_c[0] = target_percent;
11923 target_percent_c[1] = target_c;
11924 target_percent_c[2] = '\0';
11926 target_percent_s[0] = target_percent;
11927 target_percent_s[1] = target_s;
11928 target_percent_s[2] = '\0';
11930 target_percent_s_newline[0] = target_percent;
11931 target_percent_s_newline[1] = target_s;
11932 target_percent_s_newline[2] = target_newline;
11933 target_percent_s_newline[3] = '\0';
11935 init = true;
11937 return true;
11940 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11941 and no overflow/underflow occurred. INEXACT is true if M was not
11942 exactly calculated. TYPE is the tree type for the result. This
11943 function assumes that the caller cleared the MPFR flags and then
11944 calculated M, so that any flag set in the process can be checked
11945 here. Return NULL_TREE if any checks fail. */
11947 static tree
11948 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11950 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11951 overflow/underflow occurred. If -frounding-math, proceed iff the
11952 result of calling FUNC was exact. */
11953 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11954 && (!flag_rounding_math || !inexact))
11956 REAL_VALUE_TYPE rr;
11958 real_from_mpfr (&rr, m);
11959 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
11960 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11961 but the mpfr_t is not, then we underflowed in the
11962 conversion. */
11963 if (!real_isnan (&rr) && !real_isinf (&rr)
11964 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11966 REAL_VALUE_TYPE rmode;
11968 real_convert (&rmode, TYPE_MODE (type), &rr);
11969 /* Proceed iff the specified mode can hold the value. */
11970 if (real_identical (&rmode, &rr))
11971 return build_real (type, rmode);
11974 return NULL_TREE;
11977 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11978 FUNC on it and return the resulting value as a tree with type TYPE.
11979 If MIN and/or MAX are not NULL, then the supplied ARG must be
11980 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11981 acceptable values, otherwise they are not. The mpfr precision is
11982 set to the precision of TYPE. We assume that function FUNC returns
11983 zero if the result could be calculated exactly within the requested
11984 precision. */
11986 static tree
11987 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11988 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11989 bool inclusive)
11991 tree result = NULL_TREE;
11993 STRIP_NOPS (arg);
11995 /* To proceed, MPFR must exactly represent the target floating point
11996 format, which only happens when the target base equals two. */
11997 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11998 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12000 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12002 if (!real_isnan (ra) && !real_isinf (ra)
12003 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12004 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12006 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12007 int inexact;
12008 mpfr_t m;
12010 mpfr_init2 (m, prec);
12011 mpfr_from_real (m, ra);
12012 mpfr_clear_flags ();
12013 inexact = func (m, m, GMP_RNDN);
12014 result = do_mpfr_ckconv (m, type, inexact);
12015 mpfr_clear (m);
12019 return result;
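/* Illustrative, hypothetical test case (not GCC source): the kind of
   constant folding do_mpfr_arg1 enables.  With REAL_CST arguments in
   range, calls like these are typically evaluated at compile time with
   MPFR and replaced by a REAL_CST, so no libm call is needed (subject to
   the exactness rules in do_mpfr_ckconv).  The function name is invented
   for this sketch.  */
#include <math.h>

double
example_mpfr_fold (void)
{
  /* Both operands are literal constants, so the whole expression can
     usually be reduced to a single constant at compile time.  */
  return sin (0.5) + exp (1.0);
}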
12022 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12023 FUNC on it and return the resulting value as a tree with type TYPE.
12024 The mpfr precision is set to the precision of TYPE. We assume that
12025 function FUNC returns zero if the result could be calculated
12026 exactly within the requested precision. */
12028 static tree
12029 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12030 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12032 tree result = NULL_TREE;
12034 STRIP_NOPS (arg1);
12035 STRIP_NOPS (arg2);
12037 /* To proceed, MPFR must exactly represent the target floating point
12038 format, which only happens when the target base equals two. */
12039 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12040 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12041 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12043 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12044 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12046 if (!real_isnan (ra1) && !real_isinf (ra1)
12047 && !real_isnan (ra2) && !real_isinf (ra2))
12049 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12050 int inexact;
12051 mpfr_t m1, m2;
12053 mpfr_inits2 (prec, m1, m2, NULL);
12054 mpfr_from_real (m1, ra1);
12055 mpfr_from_real (m2, ra2);
12056 mpfr_clear_flags ();
12057 inexact = func (m1, m1, m2, GMP_RNDN);
12058 result = do_mpfr_ckconv (m1, type, inexact);
12059 mpfr_clears (m1, m2, NULL);
12063 return result;
12066 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12067 FUNC on it and return the resulting value as a tree with type TYPE.
12068 The mpfr precision is set to the precision of TYPE. We assume that
12069 function FUNC returns zero if the result could be calculated
12070 exactly within the requested precision. */
12072 static tree
12073 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12074 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12076 tree result = NULL_TREE;
12078 STRIP_NOPS (arg1);
12079 STRIP_NOPS (arg2);
12080 STRIP_NOPS (arg3);
12082 /* To proceed, MPFR must exactly represent the target floating point
12083 format, which only happens when the target base equals two. */
12084 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12085 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12086 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12087 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12089 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12090 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12091 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12093 if (!real_isnan (ra1) && !real_isinf (ra1)
12094 && !real_isnan (ra2) && !real_isinf (ra2)
12095 && !real_isnan (ra3) && !real_isinf (ra3))
12097 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12098 int inexact;
12099 mpfr_t m1, m2, m3;
12101 mpfr_inits2 (prec, m1, m2, m3, NULL);
12102 mpfr_from_real (m1, ra1);
12103 mpfr_from_real (m2, ra2);
12104 mpfr_from_real (m3, ra3);
12105 mpfr_clear_flags ();
12106 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12107 result = do_mpfr_ckconv (m1, type, inexact);
12108 mpfr_clears (m1, m2, m3, NULL);
12112 return result;
12115 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12116 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12117 If ARG_SINP and ARG_COSP are NULL then the result is returned
12118 as a complex value.
12119 The type is taken from the type of ARG and is used for setting the
12120 precision of the calculation and results. */
12122 static tree
12123 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12125 tree const type = TREE_TYPE (arg);
12126 tree result = NULL_TREE;
12128 STRIP_NOPS (arg);
12130 /* To proceed, MPFR must exactly represent the target floating point
12131 format, which only happens when the target base equals two. */
12132 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12133 && TREE_CODE (arg) == REAL_CST
12134 && !TREE_OVERFLOW (arg))
12136 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12138 if (!real_isnan (ra) && !real_isinf (ra))
12140 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12141 tree result_s, result_c;
12142 int inexact;
12143 mpfr_t m, ms, mc;
12145 mpfr_inits2 (prec, m, ms, mc, NULL);
12146 mpfr_from_real (m, ra);
12147 mpfr_clear_flags ();
12148 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12149 result_s = do_mpfr_ckconv (ms, type, inexact);
12150 result_c = do_mpfr_ckconv (mc, type, inexact);
12151 mpfr_clears (m, ms, mc, NULL);
12152 if (result_s && result_c)
12154 /* If we are to return in a complex value do so. */
12155 if (!arg_sinp && !arg_cosp)
12156 return build_complex (build_complex_type (type),
12157 result_c, result_s);
12159 /* Dereference the sin/cos pointer arguments. */
12160 arg_sinp = build_fold_indirect_ref (arg_sinp);
12161 arg_cosp = build_fold_indirect_ref (arg_cosp);
12162 /* Proceed if valid pointer types were passed in. */
12163 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12164 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12166 /* Set the values. */
12167 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12168 result_s);
12169 TREE_SIDE_EFFECTS (result_s) = 1;
12170 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12171 result_c);
12172 TREE_SIDE_EFFECTS (result_c) = 1;
12173 /* Combine the assignments into a compound expr. */
12174 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12175 result_s, result_c));
12180 return result;