1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
/* Default: varargs padding follows target endianness.
   NOTE(review): the closing #endif was lost in extraction; restored here.  */
#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names
[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names
[(int) END_BUILTINS
] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls
[(int) END_BUILTINS
];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls
[(int) END_BUILTINS
];
77 static const char *c_getstr (tree
);
78 static rtx
c_readstr (const char *, enum machine_mode
);
79 static int target_char_cast (tree
, char *);
80 static rtx
get_memory_rtx (tree
, tree
);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx
result_vector (int, rtx
);
86 static void expand_builtin_update_setjmp_buf (rtx
);
87 static void expand_builtin_prefetch (tree
);
88 static rtx
expand_builtin_apply_args (void);
89 static rtx
expand_builtin_apply_args_1 (void);
90 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
91 static void expand_builtin_return (rtx
);
92 static enum type_class
type_to_class (tree
);
93 static rtx
expand_builtin_classify_type (tree
);
94 static void expand_errno_check (tree
, rtx
);
95 static rtx
expand_builtin_mathfn (tree
, rtx
, rtx
);
96 static rtx
expand_builtin_mathfn_2 (tree
, rtx
, rtx
);
97 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
98 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
, rtx
);
99 static rtx
expand_builtin_sincos (tree
);
100 static rtx
expand_builtin_cexpi (tree
, rtx
, rtx
);
101 static rtx
expand_builtin_int_roundingfn (tree
, rtx
);
102 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
);
103 static rtx
expand_builtin_args_info (tree
);
104 static rtx
expand_builtin_next_arg (void);
105 static rtx
expand_builtin_va_start (tree
);
106 static rtx
expand_builtin_va_end (tree
);
107 static rtx
expand_builtin_va_copy (tree
);
108 static rtx
expand_builtin_memchr (tree
, rtx
, enum machine_mode
);
109 static rtx
expand_builtin_memcmp (tree
, rtx
, enum machine_mode
);
110 static rtx
expand_builtin_strcmp (tree
, rtx
, enum machine_mode
);
111 static rtx
expand_builtin_strncmp (tree
, rtx
, enum machine_mode
);
112 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
113 static rtx
expand_builtin_strcat (tree
, tree
, rtx
, enum machine_mode
);
114 static rtx
expand_builtin_strncat (tree
, rtx
, enum machine_mode
);
115 static rtx
expand_builtin_strspn (tree
, rtx
, enum machine_mode
);
116 static rtx
expand_builtin_strcspn (tree
, rtx
, enum machine_mode
);
117 static rtx
expand_builtin_memcpy (tree
, rtx
, enum machine_mode
);
118 static rtx
expand_builtin_mempcpy (tree
, rtx
, enum machine_mode
);
119 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, tree
, rtx
,
120 enum machine_mode
, int);
121 static rtx
expand_builtin_memmove (tree
, rtx
, enum machine_mode
, int);
122 static rtx
expand_builtin_memmove_args (tree
, tree
, tree
, tree
, rtx
,
123 enum machine_mode
, int);
124 static rtx
expand_builtin_bcopy (tree
, int);
125 static rtx
expand_builtin_strcpy (tree
, tree
, rtx
, enum machine_mode
);
126 static rtx
expand_builtin_strcpy_args (tree
, tree
, tree
, rtx
, enum machine_mode
);
127 static rtx
expand_builtin_stpcpy (tree
, rtx
, enum machine_mode
);
128 static rtx
expand_builtin_strncpy (tree
, rtx
, enum machine_mode
);
129 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, enum machine_mode
);
130 static rtx
expand_builtin_memset (tree
, rtx
, enum machine_mode
);
131 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, enum machine_mode
, tree
);
132 static rtx
expand_builtin_bzero (tree
);
133 static rtx
expand_builtin_strlen (tree
, rtx
, enum machine_mode
);
134 static rtx
expand_builtin_strstr (tree
, rtx
, enum machine_mode
);
135 static rtx
expand_builtin_strpbrk (tree
, rtx
, enum machine_mode
);
136 static rtx
expand_builtin_strchr (tree
, rtx
, enum machine_mode
);
137 static rtx
expand_builtin_strrchr (tree
, rtx
, enum machine_mode
);
138 static rtx
expand_builtin_alloca (tree
, rtx
);
139 static rtx
expand_builtin_unop (enum machine_mode
, tree
, rtx
, rtx
, optab
);
140 static rtx
expand_builtin_frame_address (tree
, tree
);
141 static rtx
expand_builtin_fputs (tree
, rtx
, bool);
142 static rtx
expand_builtin_printf (tree
, rtx
, enum machine_mode
, bool);
143 static rtx
expand_builtin_fprintf (tree
, rtx
, enum machine_mode
, bool);
144 static rtx
expand_builtin_sprintf (tree
, rtx
, enum machine_mode
);
145 static tree
stabilize_va_list (tree
, int);
146 static rtx
expand_builtin_expect (tree
, rtx
);
147 static tree
fold_builtin_constant_p (tree
);
148 static tree
fold_builtin_expect (tree
, tree
);
149 static tree
fold_builtin_classify_type (tree
);
150 static tree
fold_builtin_strlen (tree
);
151 static tree
fold_builtin_inf (tree
, int);
152 static tree
fold_builtin_nan (tree
, tree
, int);
153 static tree
rewrite_call_expr (tree
, int, tree
, int, ...);
154 static bool validate_arg (const_tree
, enum tree_code code
);
155 static bool integer_valued_real_p (tree
);
156 static tree
fold_trunc_transparent_mathfn (tree
, tree
);
157 static bool readonly_data_expr (tree
);
158 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
159 static rtx
expand_builtin_signbit (tree
, rtx
);
160 static tree
fold_builtin_sqrt (tree
, tree
);
161 static tree
fold_builtin_cbrt (tree
, tree
);
162 static tree
fold_builtin_pow (tree
, tree
, tree
, tree
);
163 static tree
fold_builtin_powi (tree
, tree
, tree
, tree
);
164 static tree
fold_builtin_cos (tree
, tree
, tree
);
165 static tree
fold_builtin_cosh (tree
, tree
, tree
);
166 static tree
fold_builtin_tan (tree
, tree
);
167 static tree
fold_builtin_trunc (tree
, tree
);
168 static tree
fold_builtin_floor (tree
, tree
);
169 static tree
fold_builtin_ceil (tree
, tree
);
170 static tree
fold_builtin_round (tree
, tree
);
171 static tree
fold_builtin_int_roundingfn (tree
, tree
);
172 static tree
fold_builtin_bitop (tree
, tree
);
173 static tree
fold_builtin_memory_op (tree
, tree
, tree
, tree
, bool, int);
174 static tree
fold_builtin_strchr (tree
, tree
, tree
);
175 static tree
fold_builtin_memchr (tree
, tree
, tree
, tree
);
176 static tree
fold_builtin_memcmp (tree
, tree
, tree
);
177 static tree
fold_builtin_strcmp (tree
, tree
);
178 static tree
fold_builtin_strncmp (tree
, tree
, tree
);
179 static tree
fold_builtin_signbit (tree
, tree
);
180 static tree
fold_builtin_copysign (tree
, tree
, tree
, tree
);
181 static tree
fold_builtin_isascii (tree
);
182 static tree
fold_builtin_toascii (tree
);
183 static tree
fold_builtin_isdigit (tree
);
184 static tree
fold_builtin_fabs (tree
, tree
);
185 static tree
fold_builtin_abs (tree
, tree
);
186 static tree
fold_builtin_unordered_cmp (tree
, tree
, tree
, enum tree_code
,
188 static tree
fold_builtin_n (tree
, tree
*, int, bool);
189 static tree
fold_builtin_0 (tree
, bool);
190 static tree
fold_builtin_1 (tree
, tree
, bool);
191 static tree
fold_builtin_2 (tree
, tree
, tree
, bool);
192 static tree
fold_builtin_3 (tree
, tree
, tree
, tree
, bool);
193 static tree
fold_builtin_4 (tree
, tree
, tree
, tree
, tree
, bool);
194 static tree
fold_builtin_varargs (tree
, tree
, bool);
196 static tree
fold_builtin_strpbrk (tree
, tree
, tree
);
197 static tree
fold_builtin_strstr (tree
, tree
, tree
);
198 static tree
fold_builtin_strrchr (tree
, tree
, tree
);
199 static tree
fold_builtin_strcat (tree
, tree
);
200 static tree
fold_builtin_strncat (tree
, tree
, tree
);
201 static tree
fold_builtin_strspn (tree
, tree
);
202 static tree
fold_builtin_strcspn (tree
, tree
);
203 static tree
fold_builtin_sprintf (tree
, tree
, tree
, int);
205 static rtx
expand_builtin_object_size (tree
);
206 static rtx
expand_builtin_memory_chk (tree
, rtx
, enum machine_mode
,
207 enum built_in_function
);
208 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
209 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
210 static void maybe_emit_free_warning (tree
);
211 static tree
fold_builtin_object_size (tree
, tree
);
212 static tree
fold_builtin_strcat_chk (tree
, tree
, tree
, tree
);
213 static tree
fold_builtin_strncat_chk (tree
, tree
, tree
, tree
, tree
);
214 static tree
fold_builtin_sprintf_chk (tree
, enum built_in_function
);
215 static tree
fold_builtin_printf (tree
, tree
, tree
, bool, enum built_in_function
);
216 static tree
fold_builtin_fprintf (tree
, tree
, tree
, tree
, bool,
217 enum built_in_function
);
218 static bool init_target_chars (void);
220 static unsigned HOST_WIDE_INT target_newline
;
221 static unsigned HOST_WIDE_INT target_percent
;
222 static unsigned HOST_WIDE_INT target_c
;
223 static unsigned HOST_WIDE_INT target_s
;
224 static char target_percent_c
[3];
225 static char target_percent_s
[3];
226 static char target_percent_s_newline
[4];
227 static tree
do_mpfr_arg1 (tree
, tree
, int (*)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
228 const REAL_VALUE_TYPE
*, const REAL_VALUE_TYPE
*, bool);
229 static tree
do_mpfr_arg2 (tree
, tree
, tree
,
230 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
231 static tree
do_mpfr_arg3 (tree
, tree
, tree
, tree
,
232 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
233 static tree
do_mpfr_sincos (tree
, tree
, tree
);
234 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
235 static tree
do_mpfr_bessel_n (tree
, tree
, tree
,
236 int (*)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
237 const REAL_VALUE_TYPE
*, bool);
238 static tree
do_mpfr_remquo (tree
, tree
, tree
);
239 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
242 /* Return true if NODE should be considered for inline expansion regardless
243 of the optimization level. This means whenever a function is invoked with
244 its "internal" name, which normally contains the prefix "__builtin". */
246 static bool called_as_built_in (tree node
)
248 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
249 if (strncmp (name
, "__builtin_", 10) == 0)
251 if (strncmp (name
, "__sync_", 7) == 0)
256 /* Return the alignment in bits of EXP, a pointer valued expression.
257 But don't return more than MAX_ALIGN no matter what.
258 The alignment returned is, by default, the alignment of the thing that
259 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
261 Otherwise, look at the expression to see if we can do better, i.e., if the
262 expression is actually pointing at an object whose alignment is tighter. */
265 get_pointer_alignment (tree exp
, unsigned int max_align
)
267 unsigned int align
, inner
;
269 /* We rely on TER to compute accurate alignment information. */
270 if (!(optimize
&& flag_tree_ter
))
273 if (!POINTER_TYPE_P (TREE_TYPE (exp
)))
276 align
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
277 align
= MIN (align
, max_align
);
281 switch (TREE_CODE (exp
))
284 exp
= TREE_OPERAND (exp
, 0);
285 if (! POINTER_TYPE_P (TREE_TYPE (exp
)))
288 inner
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
289 align
= MIN (inner
, max_align
);
292 case POINTER_PLUS_EXPR
:
293 /* If sum of pointer + int, restrict our maximum alignment to that
294 imposed by the integer. If not, we can't do any better than
296 if (! host_integerp (TREE_OPERAND (exp
, 1), 1))
299 while (((tree_low_cst (TREE_OPERAND (exp
, 1), 1))
300 & (max_align
/ BITS_PER_UNIT
- 1))
304 exp
= TREE_OPERAND (exp
, 0);
308 /* See what we are pointing at and look at its alignment. */
309 exp
= TREE_OPERAND (exp
, 0);
311 if (handled_component_p (exp
))
313 HOST_WIDE_INT bitsize
, bitpos
;
315 enum machine_mode mode
;
316 int unsignedp
, volatilep
;
318 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
319 &mode
, &unsignedp
, &volatilep
, true);
321 inner
= MIN (inner
, (unsigned) (bitpos
& -bitpos
));
322 if (offset
&& TREE_CODE (offset
) == PLUS_EXPR
323 && host_integerp (TREE_OPERAND (offset
, 1), 1))
325 /* Any overflow in calculating offset_bits won't change
328 = ((unsigned) tree_low_cst (TREE_OPERAND (offset
, 1), 1)
332 inner
= MIN (inner
, (offset_bits
& -offset_bits
));
333 offset
= TREE_OPERAND (offset
, 0);
335 if (offset
&& TREE_CODE (offset
) == MULT_EXPR
336 && host_integerp (TREE_OPERAND (offset
, 1), 1))
338 /* Any overflow in calculating offset_factor won't change
340 unsigned offset_factor
341 = ((unsigned) tree_low_cst (TREE_OPERAND (offset
, 1), 1)
345 inner
= MIN (inner
, (offset_factor
& -offset_factor
));
348 inner
= MIN (inner
, BITS_PER_UNIT
);
351 align
= MIN (inner
, DECL_ALIGN (exp
));
352 #ifdef CONSTANT_ALIGNMENT
353 else if (CONSTANT_CLASS_P (exp
))
354 align
= MIN (inner
, (unsigned)CONSTANT_ALIGNMENT (exp
, align
));
356 else if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
357 || TREE_CODE (exp
) == INDIRECT_REF
)
358 align
= MIN (TYPE_ALIGN (TREE_TYPE (exp
)), inner
);
360 align
= MIN (align
, inner
);
361 return MIN (align
, max_align
);
369 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
370 way, because it could contain a zero byte in the middle.
371 TREE_STRING_LENGTH is the size of the character array, not the string.
373 ONLY_VALUE should be nonzero if the result is not going to be emitted
374 into the instruction stream and zero if it is going to be expanded.
375 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
376 is returned, otherwise NULL, since
377 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
378 evaluate the side-effects.
380 The value returned is of type `ssizetype'.
382 Unfortunately, string_constant can't access the values of const char
383 arrays with initializers, so neither can we do so here. */
386 c_strlen (tree src
, int only_value
)
389 HOST_WIDE_INT offset
;
394 if (TREE_CODE (src
) == COND_EXPR
395 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
399 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
400 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
401 if (tree_int_cst_equal (len1
, len2
))
405 if (TREE_CODE (src
) == COMPOUND_EXPR
406 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
407 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
409 src
= string_constant (src
, &offset_node
);
413 max
= TREE_STRING_LENGTH (src
) - 1;
414 ptr
= TREE_STRING_POINTER (src
);
416 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
418 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
419 compute the offset to the following null if we don't know where to
420 start searching for it. */
423 for (i
= 0; i
< max
; i
++)
427 /* We don't know the starting offset, but we do know that the string
428 has no internal zero bytes. We can assume that the offset falls
429 within the bounds of the string; otherwise, the programmer deserves
430 what he gets. Subtract the offset from the length of the string,
431 and return that. This would perhaps not be valid if we were dealing
432 with named arrays in addition to literal string constants. */
434 return size_diffop (size_int (max
), offset_node
);
437 /* We have a known offset into the string. Start searching there for
438 a null character if we can represent it as a single HOST_WIDE_INT. */
439 if (offset_node
== 0)
441 else if (! host_integerp (offset_node
, 0))
444 offset
= tree_low_cst (offset_node
, 0);
446 /* If the offset is known to be out of bounds, warn, and call strlen at
448 if (offset
< 0 || offset
> max
)
450 /* Suppress multiple warnings for propagated constant strings. */
451 if (! TREE_NO_WARNING (src
))
453 warning (0, "offset outside bounds of constant string");
454 TREE_NO_WARNING (src
) = 1;
459 /* Use strlen to search for the first zero byte. Since any strings
460 constructed with build_string will have nulls appended, we win even
461 if we get handed something like (char[4])"abcd".
463 Since OFFSET is our starting index into the string, no further
464 calculation is needed. */
465 return ssize_int (strlen (ptr
+ offset
));
468 /* Return a char pointer for a C string if it is a string constant
469 or sum of string constant and integer constant. */
476 src
= string_constant (src
, &offset_node
);
480 if (offset_node
== 0)
481 return TREE_STRING_POINTER (src
);
482 else if (!host_integerp (offset_node
, 1)
483 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
486 return TREE_STRING_POINTER (src
) + tree_low_cst (offset_node
, 1);
489 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
490 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
493 c_readstr (const char *str
, enum machine_mode mode
)
499 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
504 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
507 if (WORDS_BIG_ENDIAN
)
508 j
= GET_MODE_SIZE (mode
) - i
- 1;
509 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
510 && GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
511 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
513 gcc_assert (j
<= 2 * HOST_BITS_PER_WIDE_INT
);
516 ch
= (unsigned char) str
[i
];
517 c
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
519 return immed_double_const (c
[0], c
[1], mode
);
522 /* Cast a target constant CST to target CHAR and if that value fits into
523 host char type, return zero and put that value into variable pointed to by
527 target_char_cast (tree cst
, char *p
)
529 unsigned HOST_WIDE_INT val
, hostval
;
531 if (!host_integerp (cst
, 1)
532 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
535 val
= tree_low_cst (cst
, 1);
536 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
537 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
540 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
541 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
550 /* Similar to save_expr, but assumes that arbitrary code is not executed
551 in between the multiple evaluations. In particular, we assume that a
552 non-addressable local variable will not be modified. */
555 builtin_save_expr (tree exp
)
557 if (TREE_ADDRESSABLE (exp
) == 0
558 && (TREE_CODE (exp
) == PARM_DECL
559 || (TREE_CODE (exp
) == VAR_DECL
&& !TREE_STATIC (exp
))))
562 return save_expr (exp
);
565 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
566 times to get the address of either a higher stack frame, or a return
567 address located within it (depending on FNDECL_CODE). */
570 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
574 #ifdef INITIAL_FRAME_ADDRESS_RTX
575 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
579 /* For a zero count with __builtin_return_address, we don't care what
580 frame address we return, because target-specific definitions will
581 override us. Therefore frame pointer elimination is OK, and using
582 the soft frame pointer is OK.
584 For a nonzero count, or a zero count with __builtin_frame_address,
585 we require a stable offset from the current frame pointer to the
586 previous one, so we must use the hard frame pointer, and
587 we must disable frame pointer elimination. */
588 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
589 tem
= frame_pointer_rtx
;
592 tem
= hard_frame_pointer_rtx
;
594 /* Tell reload not to eliminate the frame pointer. */
595 crtl
->accesses_prior_frames
= 1;
599 /* Some machines need special handling before we can access
600 arbitrary frames. For example, on the SPARC, we must first flush
601 all register windows to the stack. */
602 #ifdef SETUP_FRAME_ADDRESSES
604 SETUP_FRAME_ADDRESSES ();
607 /* On the SPARC, the return address is not in the frame, it is in a
608 register. There is no way to access it off of the current frame
609 pointer, but it can be accessed off the previous frame pointer by
610 reading the value from the register window save area. */
611 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
612 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
616 /* Scan back COUNT frames to the specified frame. */
617 for (i
= 0; i
< count
; i
++)
619 /* Assume the dynamic chain pointer is in the word that the
620 frame address points to, unless otherwise specified. */
621 #ifdef DYNAMIC_CHAIN_ADDRESS
622 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
624 tem
= memory_address (Pmode
, tem
);
625 tem
= gen_frame_mem (Pmode
, tem
);
626 tem
= copy_to_reg (tem
);
629 /* For __builtin_frame_address, return what we've got. But, on
630 the SPARC for example, we may have to add a bias. */
631 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
632 #ifdef FRAME_ADDR_RTX
633 return FRAME_ADDR_RTX (tem
);
638 /* For __builtin_return_address, get the return address from that frame. */
639 #ifdef RETURN_ADDR_RTX
640 tem
= RETURN_ADDR_RTX (count
, tem
);
642 tem
= memory_address (Pmode
,
643 plus_constant (tem
, GET_MODE_SIZE (Pmode
)));
644 tem
= gen_frame_mem (Pmode
, tem
);
649 /* Alias set used for setjmp buffer. */
650 static alias_set_type setjmp_alias_set
= -1;
652 /* Construct the leading half of a __builtin_setjmp call. Control will
653 return to RECEIVER_LABEL. This is also called directly by the SJLJ
654 exception handling code. */
657 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
659 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
663 if (setjmp_alias_set
== -1)
664 setjmp_alias_set
= new_alias_set ();
666 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
668 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
670 /* We store the frame pointer and the address of receiver_label in
671 the buffer and use the rest of it for the stack save area, which
672 is machine-dependent. */
674 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
675 set_mem_alias_set (mem
, setjmp_alias_set
);
676 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
678 mem
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
, GET_MODE_SIZE (Pmode
))),
679 set_mem_alias_set (mem
, setjmp_alias_set
);
681 emit_move_insn (validize_mem (mem
),
682 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
684 stack_save
= gen_rtx_MEM (sa_mode
,
685 plus_constant (buf_addr
,
686 2 * GET_MODE_SIZE (Pmode
)));
687 set_mem_alias_set (stack_save
, setjmp_alias_set
);
688 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
690 /* If there is further processing to do, do it. */
691 #ifdef HAVE_builtin_setjmp_setup
692 if (HAVE_builtin_setjmp_setup
)
693 emit_insn (gen_builtin_setjmp_setup (buf_addr
));
696 /* Tell optimize_save_area_alloca that extra work is going to
697 need to go on during alloca. */
698 cfun
->calls_setjmp
= 1;
700 /* We have a nonlocal label. */
701 cfun
->has_nonlocal_label
= 1;
704 /* Construct the trailing part of a __builtin_setjmp call. This is
705 also called directly by the SJLJ exception handling code. */
708 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED
)
710 /* Clobber the FP when we get here, so we have to make sure it's
711 marked as used by this function. */
712 emit_use (hard_frame_pointer_rtx
);
714 /* Mark the static chain as clobbered here so life information
715 doesn't get messed up for it. */
716 emit_clobber (static_chain_rtx
);
718 /* Now put in the code to restore the frame pointer, and argument
719 pointer, if needed. */
720 #ifdef HAVE_nonlocal_goto
721 if (! HAVE_nonlocal_goto
)
724 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
725 /* This might change the hard frame pointer in ways that aren't
726 apparent to early optimization passes, so force a clobber. */
727 emit_clobber (hard_frame_pointer_rtx
);
730 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
731 if (fixed_regs
[ARG_POINTER_REGNUM
])
733 #ifdef ELIMINABLE_REGS
735 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
737 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
738 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
739 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
742 if (i
== ARRAY_SIZE (elim_regs
))
745 /* Now restore our arg pointer from the address at which it
746 was saved in our stack frame. */
747 emit_move_insn (crtl
->args
.internal_arg_pointer
,
748 copy_to_reg (get_arg_pointer_save_area ()));
753 #ifdef HAVE_builtin_setjmp_receiver
754 if (HAVE_builtin_setjmp_receiver
)
755 emit_insn (gen_builtin_setjmp_receiver (receiver_label
));
758 #ifdef HAVE_nonlocal_goto_receiver
759 if (HAVE_nonlocal_goto_receiver
)
760 emit_insn (gen_nonlocal_goto_receiver ());
765 /* We must not allow the code we just generated to be reordered by
766 scheduling. Specifically, the update of the frame pointer must
767 happen immediately, not later. */
768 emit_insn (gen_blockage ());
771 /* __builtin_longjmp is passed a pointer to an array of five words (not
772 all will be used on all machines). It operates similarly to the C
773 library function of the same name, but is more efficient. Much of
774 the code below is copied from the handling of non-local gotos. */
777 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
779 rtx fp
, lab
, stack
, insn
, last
;
780 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
782 /* DRAP is needed for stack realign if longjmp is expanded to current
784 if (SUPPORTS_STACK_ALIGNMENT
)
785 crtl
->need_drap
= true;
787 if (setjmp_alias_set
== -1)
788 setjmp_alias_set
= new_alias_set ();
790 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
792 buf_addr
= force_reg (Pmode
, buf_addr
);
794 /* We used to store value in static_chain_rtx, but that fails if pointers
795 are smaller than integers. We instead require that the user must pass
796 a second argument of 1, because that is what builtin_setjmp will
797 return. This also makes EH slightly more efficient, since we are no
798 longer copying around a value that we don't care about. */
799 gcc_assert (value
== const1_rtx
);
801 last
= get_last_insn ();
802 #ifdef HAVE_builtin_longjmp
803 if (HAVE_builtin_longjmp
)
804 emit_insn (gen_builtin_longjmp (buf_addr
));
808 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
809 lab
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
,
810 GET_MODE_SIZE (Pmode
)));
812 stack
= gen_rtx_MEM (sa_mode
, plus_constant (buf_addr
,
813 2 * GET_MODE_SIZE (Pmode
)));
814 set_mem_alias_set (fp
, setjmp_alias_set
);
815 set_mem_alias_set (lab
, setjmp_alias_set
);
816 set_mem_alias_set (stack
, setjmp_alias_set
);
818 /* Pick up FP, label, and SP from the block and jump. This code is
819 from expand_goto in stmt.c; see there for detailed comments. */
820 #ifdef HAVE_nonlocal_goto
821 if (HAVE_nonlocal_goto
)
822 /* We have to pass a value to the nonlocal_goto pattern that will
823 get copied into the static_chain pointer, but it does not matter
824 what that value is, because builtin_setjmp does not use it. */
825 emit_insn (gen_nonlocal_goto (value
, lab
, stack
, fp
));
829 lab
= copy_to_reg (lab
);
831 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
832 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
834 emit_move_insn (hard_frame_pointer_rtx
, fp
);
835 emit_stack_restore (SAVE_NONLOCAL
, stack
, NULL_RTX
);
837 emit_use (hard_frame_pointer_rtx
);
838 emit_use (stack_pointer_rtx
);
839 emit_indirect_jump (lab
);
843 /* Search backwards and mark the jump insn as a non-local goto.
844 Note that this precludes the use of __builtin_longjmp to a
845 __builtin_setjmp target in the same function. However, we've
846 already cautioned the user that these functions are for
847 internal exception handling use only. */
848 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
850 gcc_assert (insn
!= last
);
854 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
857 else if (CALL_P (insn
))
862 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
863 and the address of the save area. */
866 expand_builtin_nonlocal_goto (tree exp
)
868 tree t_label
, t_save_area
;
869 rtx r_label
, r_save_area
, r_fp
, r_sp
, insn
;
871 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
874 t_label
= CALL_EXPR_ARG (exp
, 0);
875 t_save_area
= CALL_EXPR_ARG (exp
, 1);
877 r_label
= expand_normal (t_label
);
878 r_label
= convert_memory_address (Pmode
, r_label
);
879 r_save_area
= expand_normal (t_save_area
);
880 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
881 /* Copy the address of the save location to a register just in case it was based
882 on the frame pointer. */
883 r_save_area
= copy_to_reg (r_save_area
);
884 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
885 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
886 plus_constant (r_save_area
, GET_MODE_SIZE (Pmode
)));
888 crtl
->has_nonlocal_goto
= 1;
890 #ifdef HAVE_nonlocal_goto
891 /* ??? We no longer need to pass the static chain value, afaik. */
892 if (HAVE_nonlocal_goto
)
893 emit_insn (gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
897 r_label
= copy_to_reg (r_label
);
899 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
900 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
902 /* Restore frame pointer for containing function.
903 This sets the actual hard register used for the frame pointer
904 to the location of the function's incoming static chain info.
905 The non-local goto handler will then adjust it to contain the
906 proper value and reload the argument pointer, if needed. */
907 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
908 emit_stack_restore (SAVE_NONLOCAL
, r_sp
, NULL_RTX
);
910 /* USE of hard_frame_pointer_rtx added for consistency;
911 not clear if really needed. */
912 emit_use (hard_frame_pointer_rtx
);
913 emit_use (stack_pointer_rtx
);
915 /* If the architecture is using a GP register, we must
916 conservatively assume that the target function makes use of it.
917 The prologue of functions with nonlocal gotos must therefore
918 initialize the GP register to the appropriate value, and we
919 must then make sure that this value is live at the point
920 of the jump. (Note that this doesn't necessarily apply
921 to targets with a nonlocal_goto pattern; they are free
922 to implement it in their own way. Note also that this is
923 a no-op if the GP register is a global invariant.) */
924 if ((unsigned) PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
925 && fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
926 emit_use (pic_offset_table_rtx
);
928 emit_indirect_jump (r_label
);
931 /* Search backwards to the jump insn and mark it as a
933 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
937 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
940 else if (CALL_P (insn
))
947 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
948 (not all will be used on all machines) that was passed to __builtin_setjmp.
949 It updates the stack pointer in that block to correspond to the current
953 expand_builtin_update_setjmp_buf (rtx buf_addr
)
955 enum machine_mode sa_mode
= Pmode
;
959 #ifdef HAVE_save_stack_nonlocal
960 if (HAVE_save_stack_nonlocal
)
961 sa_mode
= insn_data
[(int) CODE_FOR_save_stack_nonlocal
].operand
[0].mode
;
963 #ifdef STACK_SAVEAREA_MODE
964 sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
968 = gen_rtx_MEM (sa_mode
,
971 plus_constant (buf_addr
, 2 * GET_MODE_SIZE (Pmode
))));
975 emit_insn (gen_setjmp ());
978 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
981 /* Expand a call to __builtin_prefetch. For a target that does not support
982 data prefetch, evaluate the memory address argument in case it has side
986 expand_builtin_prefetch (tree exp
)
988 tree arg0
, arg1
, arg2
;
992 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
995 arg0
= CALL_EXPR_ARG (exp
, 0);
997 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
998 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1000 nargs
= call_expr_nargs (exp
);
1002 arg1
= CALL_EXPR_ARG (exp
, 1);
1004 arg1
= integer_zero_node
;
1006 arg2
= CALL_EXPR_ARG (exp
, 2);
1008 arg2
= build_int_cst (NULL_TREE
, 3);
1010 /* Argument 0 is an address. */
1011 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1013 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1014 if (TREE_CODE (arg1
) != INTEGER_CST
)
1016 error ("second argument to %<__builtin_prefetch%> must be a constant");
1017 arg1
= integer_zero_node
;
1019 op1
= expand_normal (arg1
);
1020 /* Argument 1 must be either zero or one. */
1021 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1023 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1028 /* Argument 2 (locality) must be a compile-time constant int. */
1029 if (TREE_CODE (arg2
) != INTEGER_CST
)
1031 error ("third argument to %<__builtin_prefetch%> must be a constant");
1032 arg2
= integer_zero_node
;
1034 op2
= expand_normal (arg2
);
1035 /* Argument 2 must be 0, 1, 2, or 3. */
1036 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1038 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1042 #ifdef HAVE_prefetch
1045 if ((! (*insn_data
[(int) CODE_FOR_prefetch
].operand
[0].predicate
)
1047 insn_data
[(int) CODE_FOR_prefetch
].operand
[0].mode
))
1048 || (GET_MODE (op0
) != Pmode
))
1050 op0
= convert_memory_address (Pmode
, op0
);
1051 op0
= force_reg (Pmode
, op0
);
1053 emit_insn (gen_prefetch (op0
, op1
, op2
));
1057 /* Don't do anything with direct references to volatile memory, but
1058 generate code to handle other side effects. */
1059 if (!MEM_P (op0
) && side_effects_p (op0
))
1063 /* Get a MEM rtx for expression EXP which is the address of an operand
1064 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1065 the maximum length of the block of memory that might be accessed or
1069 get_memory_rtx (tree exp
, tree len
)
1071 rtx addr
= expand_expr (exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1072 rtx mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1074 /* Get an expression we can use to find the attributes to assign to MEM.
1075 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1076 we can. First remove any nops. */
1077 while (CONVERT_EXPR_P (exp
)
1078 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1079 exp
= TREE_OPERAND (exp
, 0);
1081 if (TREE_CODE (exp
) == ADDR_EXPR
)
1082 exp
= TREE_OPERAND (exp
, 0);
1083 else if (POINTER_TYPE_P (TREE_TYPE (exp
)))
1084 exp
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (exp
)), exp
);
1088 /* Honor attributes derived from exp, except for the alias set
1089 (as builtin stringops may alias with anything) and the size
1090 (as stringops may access multiple array elements). */
1093 set_mem_attributes (mem
, exp
, 0);
1095 /* Allow the string and memory builtins to overflow from one
1096 field into another, see http://gcc.gnu.org/PR23561.
1097 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1098 memory accessed by the string or memory builtin will fit
1099 within the field. */
1100 if (MEM_EXPR (mem
) && TREE_CODE (MEM_EXPR (mem
)) == COMPONENT_REF
)
1102 tree mem_expr
= MEM_EXPR (mem
);
1103 HOST_WIDE_INT offset
= -1, length
= -1;
1106 while (TREE_CODE (inner
) == ARRAY_REF
1107 || CONVERT_EXPR_P (inner
)
1108 || TREE_CODE (inner
) == VIEW_CONVERT_EXPR
1109 || TREE_CODE (inner
) == SAVE_EXPR
)
1110 inner
= TREE_OPERAND (inner
, 0);
1112 gcc_assert (TREE_CODE (inner
) == COMPONENT_REF
);
1114 if (MEM_OFFSET (mem
)
1115 && GET_CODE (MEM_OFFSET (mem
)) == CONST_INT
)
1116 offset
= INTVAL (MEM_OFFSET (mem
));
1118 if (offset
>= 0 && len
&& host_integerp (len
, 0))
1119 length
= tree_low_cst (len
, 0);
1121 while (TREE_CODE (inner
) == COMPONENT_REF
)
1123 tree field
= TREE_OPERAND (inner
, 1);
1124 gcc_assert (TREE_CODE (mem_expr
) == COMPONENT_REF
);
1125 gcc_assert (field
== TREE_OPERAND (mem_expr
, 1));
1127 /* Bitfields are generally not byte-addressable. */
1128 gcc_assert (!DECL_BIT_FIELD (field
)
1129 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 1)
1130 % BITS_PER_UNIT
) == 0
1131 && host_integerp (DECL_SIZE (field
), 0)
1132 && (TREE_INT_CST_LOW (DECL_SIZE (field
))
1133 % BITS_PER_UNIT
) == 0));
1135 /* If we can prove that the memory starting at XEXP (mem, 0) and
1136 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1137 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1138 fields without DECL_SIZE_UNIT like flexible array members. */
1140 && DECL_SIZE_UNIT (field
)
1141 && host_integerp (DECL_SIZE_UNIT (field
), 0))
1144 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field
));
1147 && offset
+ length
<= size
)
1152 && host_integerp (DECL_FIELD_OFFSET (field
), 0))
1153 offset
+= TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field
))
1154 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 1)
1162 mem_expr
= TREE_OPERAND (mem_expr
, 0);
1163 inner
= TREE_OPERAND (inner
, 0);
1166 if (mem_expr
== NULL
)
1168 if (mem_expr
!= MEM_EXPR (mem
))
1170 set_mem_expr (mem
, mem_expr
);
1171 set_mem_offset (mem
, offset
>= 0 ? GEN_INT (offset
) : NULL_RTX
);
1174 set_mem_alias_set (mem
, 0);
1175 set_mem_size (mem
, NULL_RTX
);
1181 /* Built-in functions to perform an untyped call and return. */
1183 /* For each register that may be used for calling a function, this
1184 gives a mode used to copy the register's value. VOIDmode indicates
1185 the register is not used for calling a function. If the machine
1186 has register windows, this gives only the outbound registers.
1187 INCOMING_REGNO gives the corresponding inbound register. */
1188 static enum machine_mode apply_args_mode
[FIRST_PSEUDO_REGISTER
];
1190 /* For each register that may be used for returning values, this gives
1191 a mode used to copy the register's value. VOIDmode indicates the
1192 register is not used for returning values. If the machine has
1193 register windows, this gives only the outbound registers.
1194 INCOMING_REGNO gives the corresponding inbound register. */
1195 static enum machine_mode apply_result_mode
[FIRST_PSEUDO_REGISTER
];
1197 /* For each register that may be used for calling a function, this
1198 gives the offset of that register into the block returned by
1199 __builtin_apply_args. 0 indicates that the register is not
1200 used for calling a function. */
1201 static int apply_args_reg_offset
[FIRST_PSEUDO_REGISTER
];
1203 /* Return the size required for the block returned by __builtin_apply_args,
1204 and initialize apply_args_mode. */
1207 apply_args_size (void)
1209 static int size
= -1;
1212 enum machine_mode mode
;
1214 /* The values computed by this function never change. */
1217 /* The first value is the incoming arg-pointer. */
1218 size
= GET_MODE_SIZE (Pmode
);
1220 /* The second value is the structure value address unless this is
1221 passed as an "invisible" first argument. */
1222 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1223 size
+= GET_MODE_SIZE (Pmode
);
1225 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1226 if (FUNCTION_ARG_REGNO_P (regno
))
1228 mode
= reg_raw_mode
[regno
];
1230 gcc_assert (mode
!= VOIDmode
);
1232 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1233 if (size
% align
!= 0)
1234 size
= CEIL (size
, align
) * align
;
1235 apply_args_reg_offset
[regno
] = size
;
1236 size
+= GET_MODE_SIZE (mode
);
1237 apply_args_mode
[regno
] = mode
;
1241 apply_args_mode
[regno
] = VOIDmode
;
1242 apply_args_reg_offset
[regno
] = 0;
1248 /* Return the size required for the block returned by __builtin_apply,
1249 and initialize apply_result_mode. */
1252 apply_result_size (void)
1254 static int size
= -1;
1256 enum machine_mode mode
;
1258 /* The values computed by this function never change. */
1263 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1264 if (FUNCTION_VALUE_REGNO_P (regno
))
1266 mode
= reg_raw_mode
[regno
];
1268 gcc_assert (mode
!= VOIDmode
);
1270 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1271 if (size
% align
!= 0)
1272 size
= CEIL (size
, align
) * align
;
1273 size
+= GET_MODE_SIZE (mode
);
1274 apply_result_mode
[regno
] = mode
;
1277 apply_result_mode
[regno
] = VOIDmode
;
1279 /* Allow targets that use untyped_call and untyped_return to override
1280 the size so that machine-specific information can be stored here. */
1281 #ifdef APPLY_RESULT_SIZE
1282 size
= APPLY_RESULT_SIZE
;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep each slot naturally aligned within the result block.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1319 /* Save the state required to perform an untyped call with the same
1320 arguments as were passed to the current function. */
1323 expand_builtin_apply_args_1 (void)
1326 int size
, align
, regno
;
1327 enum machine_mode mode
;
1328 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1330 /* Create a block where the arg-pointer, structure value address,
1331 and argument registers can be saved. */
1332 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1334 /* Walk past the arg-pointer and structure value address. */
1335 size
= GET_MODE_SIZE (Pmode
);
1336 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1337 size
+= GET_MODE_SIZE (Pmode
);
1339 /* Save each register used in calling a function to the block. */
1340 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1341 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1343 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1344 if (size
% align
!= 0)
1345 size
= CEIL (size
, align
) * align
;
1347 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1349 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1350 size
+= GET_MODE_SIZE (mode
);
1353 /* Save the arg pointer to the block. */
1354 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1355 #ifdef STACK_GROWS_DOWNWARD
1356 /* We need the pointer as the caller actually passed them to us, not
1357 as we might have pretended they were passed. Make sure it's a valid
1358 operand, as emit_move_insn isn't expected to handle a PLUS. */
1360 = force_operand (plus_constant (tem
, crtl
->args
.pretend_args_size
),
1363 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1365 size
= GET_MODE_SIZE (Pmode
);
1367 /* Save the structure value address unless this is passed as an
1368 "invisible" first argument. */
1369 if (struct_incoming_value
)
1371 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1372 copy_to_reg (struct_incoming_value
));
1373 size
+= GET_MODE_SIZE (Pmode
);
1376 /* Return the address of the block. */
1377 return copy_addr_to_reg (XEXP (registers
, 0));
1380 /* __builtin_apply_args returns block of memory allocated on
1381 the stack into which is stored the arg pointer, structure
1382 value address, static chain, and all the registers that might
1383 possibly be used in performing a function call. The code is
1384 moved to the start of the function so the incoming values are
1388 expand_builtin_apply_args (void)
1390 /* Don't do __builtin_apply_args more than once in a function.
1391 Save the result of the first call and reuse it. */
1392 if (apply_args_value
!= 0)
1393 return apply_args_value
;
1395 /* When this function is called, it means that registers must be
1396 saved on entry to this function. So we migrate the
1397 call to the first insn of this function. */
1402 temp
= expand_builtin_apply_args_1 ();
1406 apply_args_value
= temp
;
1408 /* Put the insns after the NOTE that starts the function.
1409 If this is inside a start_sequence, make the outer-level insn
1410 chain current, so the code is placed at the start of the
1412 push_topmost_sequence ();
1413 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1414 pop_topmost_sequence ();
1419 /* Perform an untyped call and save the state required to perform an
1420 untyped return of whatever value was returned by the given function. */
1423 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1425 int size
, align
, regno
;
1426 enum machine_mode mode
;
1427 rtx incoming_args
, result
, reg
, dest
, src
, call_insn
;
1428 rtx old_stack_level
= 0;
1429 rtx call_fusage
= 0;
1430 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1432 arguments
= convert_memory_address (Pmode
, arguments
);
1434 /* Create a block where the return registers can be saved. */
1435 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1437 /* Fetch the arg pointer from the ARGUMENTS block. */
1438 incoming_args
= gen_reg_rtx (Pmode
);
1439 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1440 #ifndef STACK_GROWS_DOWNWARD
1441 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1442 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1445 /* Push a new argument block and copy the arguments. Do not allow
1446 the (potential) memcpy call below to interfere with our stack
1448 do_pending_stack_adjust ();
1451 /* Save the stack with nonlocal if available. */
1452 #ifdef HAVE_save_stack_nonlocal
1453 if (HAVE_save_stack_nonlocal
)
1454 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
, NULL_RTX
);
1457 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
1459 /* Allocate a block of memory onto the stack and copy the memory
1460 arguments to the outgoing arguments address. */
1461 allocate_dynamic_stack_space (argsize
, 0, BITS_PER_UNIT
);
1463 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1464 may have already set current_function_calls_alloca to true.
1465 current_function_calls_alloca won't be set if argsize is zero,
1466 so we have to guarantee need_drap is true here. */
1467 if (SUPPORTS_STACK_ALIGNMENT
)
1468 crtl
->need_drap
= true;
1470 dest
= virtual_outgoing_args_rtx
;
1471 #ifndef STACK_GROWS_DOWNWARD
1472 if (GET_CODE (argsize
) == CONST_INT
)
1473 dest
= plus_constant (dest
, -INTVAL (argsize
));
1475 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1477 dest
= gen_rtx_MEM (BLKmode
, dest
);
1478 set_mem_align (dest
, PARM_BOUNDARY
);
1479 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1480 set_mem_align (src
, PARM_BOUNDARY
);
1481 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1483 /* Refer to the argument block. */
1485 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1486 set_mem_align (arguments
, PARM_BOUNDARY
);
1488 /* Walk past the arg-pointer and structure value address. */
1489 size
= GET_MODE_SIZE (Pmode
);
1491 size
+= GET_MODE_SIZE (Pmode
);
1493 /* Restore each of the registers previously saved. Make USE insns
1494 for each of these registers for use in making the call. */
1495 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1496 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1498 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1499 if (size
% align
!= 0)
1500 size
= CEIL (size
, align
) * align
;
1501 reg
= gen_rtx_REG (mode
, regno
);
1502 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1503 use_reg (&call_fusage
, reg
);
1504 size
+= GET_MODE_SIZE (mode
);
1507 /* Restore the structure value address unless this is passed as an
1508 "invisible" first argument. */
1509 size
= GET_MODE_SIZE (Pmode
);
1512 rtx value
= gen_reg_rtx (Pmode
);
1513 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1514 emit_move_insn (struct_value
, value
);
1515 if (REG_P (struct_value
))
1516 use_reg (&call_fusage
, struct_value
);
1517 size
+= GET_MODE_SIZE (Pmode
);
1520 /* All arguments and registers used for the call are set up by now! */
1521 function
= prepare_call_address (function
, NULL
, &call_fusage
, 0, 0);
1523 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1524 and we don't want to load it into a register as an optimization,
1525 because prepare_call_address already did it if it should be done. */
1526 if (GET_CODE (function
) != SYMBOL_REF
)
1527 function
= memory_address (FUNCTION_MODE
, function
);
1529 /* Generate the actual call instruction and save the return value. */
1530 #ifdef HAVE_untyped_call
1531 if (HAVE_untyped_call
)
1532 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE
, function
),
1533 result
, result_vector (1, result
)));
1536 #ifdef HAVE_call_value
1537 if (HAVE_call_value
)
1541 /* Locate the unique return register. It is not possible to
1542 express a call that sets more than one return register using
1543 call_value; use untyped_call for that. In fact, untyped_call
1544 only needs to save the return registers in the given block. */
1545 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1546 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1548 gcc_assert (!valreg
); /* HAVE_untyped_call required. */
1550 valreg
= gen_rtx_REG (mode
, regno
);
1553 emit_call_insn (GEN_CALL_VALUE (valreg
,
1554 gen_rtx_MEM (FUNCTION_MODE
, function
),
1555 const0_rtx
, NULL_RTX
, const0_rtx
));
1557 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1563 /* Find the CALL insn we just emitted, and attach the register usage
1565 call_insn
= last_call_insn ();
1566 add_function_usage_to (call_insn
, call_fusage
);
1568 /* Restore the stack. */
1569 #ifdef HAVE_save_stack_nonlocal
1570 if (HAVE_save_stack_nonlocal
)
1571 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
, NULL_RTX
);
1574 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
1578 /* Return the address of the result block. */
1579 result
= copy_addr_to_reg (XEXP (result
, 0));
1580 return convert_memory_address (ptr_mode
, result
);
1583 /* Perform an untyped return. */
1586 expand_builtin_return (rtx result
)
1588 int size
, align
, regno
;
1589 enum machine_mode mode
;
1591 rtx call_fusage
= 0;
1593 result
= convert_memory_address (Pmode
, result
);
1595 apply_result_size ();
1596 result
= gen_rtx_MEM (BLKmode
, result
);
1598 #ifdef HAVE_untyped_return
1599 if (HAVE_untyped_return
)
1601 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1607 /* Restore the return value and note that each value is used. */
1609 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1610 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1612 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1613 if (size
% align
!= 0)
1614 size
= CEIL (size
, align
) * align
;
1615 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1616 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1618 push_to_sequence (call_fusage
);
1620 call_fusage
= get_insns ();
1622 size
+= GET_MODE_SIZE (mode
);
1625 /* Put the USE insns before the return. */
1626 emit_insn (call_fusage
);
1628 /* Return whatever values was restored by jumping directly to the end
1630 expand_naked_return ();
1633 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1635 static enum type_class
1636 type_to_class (tree type
)
1638 switch (TREE_CODE (type
))
1640 case VOID_TYPE
: return void_type_class
;
1641 case INTEGER_TYPE
: return integer_type_class
;
1642 case ENUMERAL_TYPE
: return enumeral_type_class
;
1643 case BOOLEAN_TYPE
: return boolean_type_class
;
1644 case POINTER_TYPE
: return pointer_type_class
;
1645 case REFERENCE_TYPE
: return reference_type_class
;
1646 case OFFSET_TYPE
: return offset_type_class
;
1647 case REAL_TYPE
: return real_type_class
;
1648 case COMPLEX_TYPE
: return complex_type_class
;
1649 case FUNCTION_TYPE
: return function_type_class
;
1650 case METHOD_TYPE
: return method_type_class
;
1651 case RECORD_TYPE
: return record_type_class
;
1653 case QUAL_UNION_TYPE
: return union_type_class
;
1654 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1655 ? string_type_class
: array_type_class
);
1656 case LANG_TYPE
: return lang_type_class
;
1657 default: return no_type_class
;
1661 /* Expand a call EXP to __builtin_classify_type. */
1664 expand_builtin_classify_type (tree exp
)
1666 if (call_expr_nargs (exp
))
1667 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1668 return GEN_INT (no_type_class
);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1685 /* Return mathematic function equivalent to FN but operating directly
1686 on TYPE, if available. If IMPLICIT is true find the function in
1687 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1688 can't do the conversion, return zero. */
1691 mathfn_built_in_1 (tree type
, enum built_in_function fn
, bool implicit
)
1693 tree
const *const fn_arr
1694 = implicit
? implicit_built_in_decls
: built_in_decls
;
1695 enum built_in_function fcode
, fcodef
, fcodel
;
1699 CASE_MATHFN (BUILT_IN_ACOS
)
1700 CASE_MATHFN (BUILT_IN_ACOSH
)
1701 CASE_MATHFN (BUILT_IN_ASIN
)
1702 CASE_MATHFN (BUILT_IN_ASINH
)
1703 CASE_MATHFN (BUILT_IN_ATAN
)
1704 CASE_MATHFN (BUILT_IN_ATAN2
)
1705 CASE_MATHFN (BUILT_IN_ATANH
)
1706 CASE_MATHFN (BUILT_IN_CBRT
)
1707 CASE_MATHFN (BUILT_IN_CEIL
)
1708 CASE_MATHFN (BUILT_IN_CEXPI
)
1709 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1710 CASE_MATHFN (BUILT_IN_COS
)
1711 CASE_MATHFN (BUILT_IN_COSH
)
1712 CASE_MATHFN (BUILT_IN_DREM
)
1713 CASE_MATHFN (BUILT_IN_ERF
)
1714 CASE_MATHFN (BUILT_IN_ERFC
)
1715 CASE_MATHFN (BUILT_IN_EXP
)
1716 CASE_MATHFN (BUILT_IN_EXP10
)
1717 CASE_MATHFN (BUILT_IN_EXP2
)
1718 CASE_MATHFN (BUILT_IN_EXPM1
)
1719 CASE_MATHFN (BUILT_IN_FABS
)
1720 CASE_MATHFN (BUILT_IN_FDIM
)
1721 CASE_MATHFN (BUILT_IN_FLOOR
)
1722 CASE_MATHFN (BUILT_IN_FMA
)
1723 CASE_MATHFN (BUILT_IN_FMAX
)
1724 CASE_MATHFN (BUILT_IN_FMIN
)
1725 CASE_MATHFN (BUILT_IN_FMOD
)
1726 CASE_MATHFN (BUILT_IN_FREXP
)
1727 CASE_MATHFN (BUILT_IN_GAMMA
)
1728 CASE_MATHFN_REENT (BUILT_IN_GAMMA
) /* GAMMA_R */
1729 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1730 CASE_MATHFN (BUILT_IN_HYPOT
)
1731 CASE_MATHFN (BUILT_IN_ILOGB
)
1732 CASE_MATHFN (BUILT_IN_INF
)
1733 CASE_MATHFN (BUILT_IN_ISINF
)
1734 CASE_MATHFN (BUILT_IN_J0
)
1735 CASE_MATHFN (BUILT_IN_J1
)
1736 CASE_MATHFN (BUILT_IN_JN
)
1737 CASE_MATHFN (BUILT_IN_LCEIL
)
1738 CASE_MATHFN (BUILT_IN_LDEXP
)
1739 CASE_MATHFN (BUILT_IN_LFLOOR
)
1740 CASE_MATHFN (BUILT_IN_LGAMMA
)
1741 CASE_MATHFN_REENT (BUILT_IN_LGAMMA
) /* LGAMMA_R */
1742 CASE_MATHFN (BUILT_IN_LLCEIL
)
1743 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1744 CASE_MATHFN (BUILT_IN_LLRINT
)
1745 CASE_MATHFN (BUILT_IN_LLROUND
)
1746 CASE_MATHFN (BUILT_IN_LOG
)
1747 CASE_MATHFN (BUILT_IN_LOG10
)
1748 CASE_MATHFN (BUILT_IN_LOG1P
)
1749 CASE_MATHFN (BUILT_IN_LOG2
)
1750 CASE_MATHFN (BUILT_IN_LOGB
)
1751 CASE_MATHFN (BUILT_IN_LRINT
)
1752 CASE_MATHFN (BUILT_IN_LROUND
)
1753 CASE_MATHFN (BUILT_IN_MODF
)
1754 CASE_MATHFN (BUILT_IN_NAN
)
1755 CASE_MATHFN (BUILT_IN_NANS
)
1756 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1757 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1758 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1759 CASE_MATHFN (BUILT_IN_POW
)
1760 CASE_MATHFN (BUILT_IN_POWI
)
1761 CASE_MATHFN (BUILT_IN_POW10
)
1762 CASE_MATHFN (BUILT_IN_REMAINDER
)
1763 CASE_MATHFN (BUILT_IN_REMQUO
)
1764 CASE_MATHFN (BUILT_IN_RINT
)
1765 CASE_MATHFN (BUILT_IN_ROUND
)
1766 CASE_MATHFN (BUILT_IN_SCALB
)
1767 CASE_MATHFN (BUILT_IN_SCALBLN
)
1768 CASE_MATHFN (BUILT_IN_SCALBN
)
1769 CASE_MATHFN (BUILT_IN_SIGNBIT
)
1770 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1771 CASE_MATHFN (BUILT_IN_SIN
)
1772 CASE_MATHFN (BUILT_IN_SINCOS
)
1773 CASE_MATHFN (BUILT_IN_SINH
)
1774 CASE_MATHFN (BUILT_IN_SQRT
)
1775 CASE_MATHFN (BUILT_IN_TAN
)
1776 CASE_MATHFN (BUILT_IN_TANH
)
1777 CASE_MATHFN (BUILT_IN_TGAMMA
)
1778 CASE_MATHFN (BUILT_IN_TRUNC
)
1779 CASE_MATHFN (BUILT_IN_Y0
)
1780 CASE_MATHFN (BUILT_IN_Y1
)
1781 CASE_MATHFN (BUILT_IN_YN
)
1787 if (TYPE_MAIN_VARIANT (type
) == double_type_node
)
1788 return fn_arr
[fcode
];
1789 else if (TYPE_MAIN_VARIANT (type
) == float_type_node
)
1790 return fn_arr
[fcodef
];
1791 else if (TYPE_MAIN_VARIANT (type
) == long_double_type_node
)
1792 return fn_arr
[fcodel
];
1797 /* Like mathfn_built_in_1(), but always use the implicit array. */
1800 mathfn_built_in (tree type
, enum built_in_function fn
)
1802 return mathfn_built_in_1 (type
, fn
, /*implicit=*/ 1);
1805 /* If errno must be maintained, expand the RTL to check if the result,
1806 TARGET, of a built-in function call, EXP, is NaN, and if so set
1810 expand_errno_check (tree exp
, rtx target
)
1812 rtx lab
= gen_label_rtx ();
1814 /* Test the result; if it is NaN, set errno=EDOM because
1815 the argument was not in the domain. */
1816 emit_cmp_and_jump_insns (target
, target
, EQ
, 0, GET_MODE (target
),
1820 /* If this built-in doesn't throw an exception, set errno directly. */
1821 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp
), 0)))
1823 #ifdef GEN_ERRNO_RTX
1824 rtx errno_rtx
= GEN_ERRNO_RTX
;
1827 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
1829 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
1835 /* Make sure the library call isn't expanded as a tail call. */
1836 CALL_EXPR_TAILCALL (exp
) = 0;
1838 /* We can't set errno=EDOM directly; let the library call do it.
1839 Pop the arguments right away in case the call gets deleted. */
1841 expand_call (exp
, target
, 0);
1846 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1847 Return NULL_RTX if a normal call should be emitted rather than expanding
1848 the function in-line. EXP is the expression that is a call to the builtin
1849 function; if convenient, the result should be placed in TARGET.
1850 SUBTARGET may be used as the target for computing one of EXP's operands. */
1853 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
1855 optab builtin_optab
;
1856 rtx op0
, insns
, before_call
;
1857 tree fndecl
= get_callee_fndecl (exp
);
1858 enum machine_mode mode
;
1859 bool errno_set
= false;
1862 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
1865 arg
= CALL_EXPR_ARG (exp
, 0);
1867 switch (DECL_FUNCTION_CODE (fndecl
))
1869 CASE_FLT_FN (BUILT_IN_SQRT
):
1870 errno_set
= ! tree_expr_nonnegative_p (arg
);
1871 builtin_optab
= sqrt_optab
;
1873 CASE_FLT_FN (BUILT_IN_EXP
):
1874 errno_set
= true; builtin_optab
= exp_optab
; break;
1875 CASE_FLT_FN (BUILT_IN_EXP10
):
1876 CASE_FLT_FN (BUILT_IN_POW10
):
1877 errno_set
= true; builtin_optab
= exp10_optab
; break;
1878 CASE_FLT_FN (BUILT_IN_EXP2
):
1879 errno_set
= true; builtin_optab
= exp2_optab
; break;
1880 CASE_FLT_FN (BUILT_IN_EXPM1
):
1881 errno_set
= true; builtin_optab
= expm1_optab
; break;
1882 CASE_FLT_FN (BUILT_IN_LOGB
):
1883 errno_set
= true; builtin_optab
= logb_optab
; break;
1884 CASE_FLT_FN (BUILT_IN_LOG
):
1885 errno_set
= true; builtin_optab
= log_optab
; break;
1886 CASE_FLT_FN (BUILT_IN_LOG10
):
1887 errno_set
= true; builtin_optab
= log10_optab
; break;
1888 CASE_FLT_FN (BUILT_IN_LOG2
):
1889 errno_set
= true; builtin_optab
= log2_optab
; break;
1890 CASE_FLT_FN (BUILT_IN_LOG1P
):
1891 errno_set
= true; builtin_optab
= log1p_optab
; break;
1892 CASE_FLT_FN (BUILT_IN_ASIN
):
1893 builtin_optab
= asin_optab
; break;
1894 CASE_FLT_FN (BUILT_IN_ACOS
):
1895 builtin_optab
= acos_optab
; break;
1896 CASE_FLT_FN (BUILT_IN_TAN
):
1897 builtin_optab
= tan_optab
; break;
1898 CASE_FLT_FN (BUILT_IN_ATAN
):
1899 builtin_optab
= atan_optab
; break;
1900 CASE_FLT_FN (BUILT_IN_FLOOR
):
1901 builtin_optab
= floor_optab
; break;
1902 CASE_FLT_FN (BUILT_IN_CEIL
):
1903 builtin_optab
= ceil_optab
; break;
1904 CASE_FLT_FN (BUILT_IN_TRUNC
):
1905 builtin_optab
= btrunc_optab
; break;
1906 CASE_FLT_FN (BUILT_IN_ROUND
):
1907 builtin_optab
= round_optab
; break;
1908 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
1909 builtin_optab
= nearbyint_optab
;
1910 if (flag_trapping_math
)
1912 /* Else fallthrough and expand as rint. */
1913 CASE_FLT_FN (BUILT_IN_RINT
):
1914 builtin_optab
= rint_optab
; break;
1919 /* Make a suitable register to place result in. */
1920 mode
= TYPE_MODE (TREE_TYPE (exp
));
1922 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
1925 /* Before working hard, check whether the instruction is available. */
1926 if (optab_handler (builtin_optab
, mode
)->insn_code
!= CODE_FOR_nothing
)
1928 target
= gen_reg_rtx (mode
);
1930 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1931 need to expand the argument again. This way, we will not perform
1932 side-effects more the once. */
1933 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
1935 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
1939 /* Compute into TARGET.
1940 Set TARGET to wherever the result comes back. */
1941 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
1946 expand_errno_check (exp
, target
);
1948 /* Output the entire sequence. */
1949 insns
= get_insns ();
1955 /* If we were unable to expand via the builtin, stop the sequence
1956 (without outputting the insns) and call to the library function
1957 with the stabilized argument list. */
1961 before_call
= get_last_insn ();
1963 return expand_call (exp
, target
, target
== const0_rtx
);
1966 /* Expand a call to the builtin binary math functions (pow and atan2).
1967 Return NULL_RTX if a normal call should be emitted rather than expanding the
1968 function in-line. EXP is the expression that is a call to the builtin
1969 function; if convenient, the result should be placed in TARGET.
1970 SUBTARGET may be used as the target for computing one of EXP's
/* Expand a call to a two-argument FP math builtin (pow, atan2, fmod,
   remainder/drem, scalb/scalbn/scalbln, ldexp) in-line via its optab.
   EXP is the CALL_EXPR; TARGET, if convenient, receives the result and
   SUBTARGET may be used when expanding the first operand.
   NOTE(review): this chunk is extraction-mangled -- statements are split
   across lines and several original lines (braces, returns, end_sequence)
   are missing; verify against pristine builtins.c before editing.  */
1974 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
1976 optab builtin_optab
;
1977 rtx op0
, op1
, insns
;
1978 int op1_type
= REAL_TYPE
;
1979 tree fndecl
= get_callee_fndecl (exp
);
1981 enum machine_mode mode
;
1982 bool errno_set
= true;
/* scalbn/scalbln/ldexp take an integer second argument; the rest take
   a real.  This pre-pass fixes OP1_TYPE before arglist validation.  */
1984 switch (DECL_FUNCTION_CODE (fndecl
))
1986 CASE_FLT_FN (BUILT_IN_SCALBN
):
1987 CASE_FLT_FN (BUILT_IN_SCALBLN
):
1988 CASE_FLT_FN (BUILT_IN_LDEXP
):
1989 op1_type
= INTEGER_TYPE
;
1994 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
1997 arg0
= CALL_EXPR_ARG (exp
, 0);
1998 arg1
= CALL_EXPR_ARG (exp
, 1);
/* Select the optab matching the builtin.  scalb/scalbn/scalbln are only
   expandable when the float format radix is 2 (then scalbn == ldexp).  */
2000 switch (DECL_FUNCTION_CODE (fndecl
))
2002 CASE_FLT_FN (BUILT_IN_POW
):
2003 builtin_optab
= pow_optab
; break;
2004 CASE_FLT_FN (BUILT_IN_ATAN2
):
2005 builtin_optab
= atan2_optab
; break;
2006 CASE_FLT_FN (BUILT_IN_SCALB
):
2007 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2009 builtin_optab
= scalb_optab
; break;
2010 CASE_FLT_FN (BUILT_IN_SCALBN
):
2011 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2012 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2014 /* Fall through... */
2015 CASE_FLT_FN (BUILT_IN_LDEXP
):
2016 builtin_optab
= ldexp_optab
; break;
2017 CASE_FLT_FN (BUILT_IN_FMOD
):
2018 builtin_optab
= fmod_optab
; break;
2019 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2020 CASE_FLT_FN (BUILT_IN_DREM
):
2021 builtin_optab
= remainder_optab
; break;
2026 /* Make a suitable register to place result in. */
2027 mode
= TYPE_MODE (TREE_TYPE (exp
));
2029 /* Before working hard, check whether the instruction is available. */
2030 if (optab_handler (builtin_optab
, mode
)->insn_code
== CODE_FOR_nothing
)
2033 target
= gen_reg_rtx (mode
);
/* When errno is irrelevant (no errno math, or the mode ignores NaNs),
   no post-expansion errno check is needed.  */
2035 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2038 /* Always stabilize the argument list. */
2039 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2040 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2042 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2043 op1
= expand_normal (arg1
);
2047 /* Compute into TARGET.
2048 Set TARGET to wherever the result comes back. */
2049 target
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2050 target
, 0, OPTAB_DIRECT
);
2052 /* If we were unable to expand via the builtin, stop the sequence
2053 (without outputting the insns) and call to the library function
2054 with the stabilized argument list. */
2058 return expand_call (exp
, target
, target
== const0_rtx
);
/* NOTE(review): presumably guarded by an errno_set check in the
   pristine source -- the surrounding control flow is missing here.  */
2062 expand_errno_check (exp
, target
);
2064 /* Output the entire sequence. */
2065 insns
= get_insns ();
2072 /* Expand a call to the builtin sin and cos math functions.
2073 Return NULL_RTX if a normal call should be emitted rather than expanding the
2074 function in-line. EXP is the expression that is a call to the builtin
2075 function; if convenient, the result should be placed in TARGET.
2076 SUBTARGET may be used as the target for computing one of EXP's
/* Expand a call to the builtin sin/cos math functions via the sincos
   optab when available, otherwise via the plain sin/cos optabs, and as
   a last resort via a library call.
   NOTE(review): extraction-mangled chunk -- statements split across
   lines and some original lines are missing; compare with pristine
   builtins.c before editing.  */
2080 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2082 optab builtin_optab
;
2084 tree fndecl
= get_callee_fndecl (exp
);
2085 enum machine_mode mode
;
2088 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2091 arg
= CALL_EXPR_ARG (exp
, 0);
/* Both sin and cos are first tried through the two-value sincos optab.  */
2093 switch (DECL_FUNCTION_CODE (fndecl
))
2095 CASE_FLT_FN (BUILT_IN_SIN
):
2096 CASE_FLT_FN (BUILT_IN_COS
):
2097 builtin_optab
= sincos_optab
; break;
2102 /* Make a suitable register to place result in. */
2103 mode
= TYPE_MODE (TREE_TYPE (exp
));
2105 /* Check if sincos insn is available, otherwise fallback
2106 to sin or cos insn. */
2107 if (optab_handler (builtin_optab
, mode
)->insn_code
== CODE_FOR_nothing
)
2108 switch (DECL_FUNCTION_CODE (fndecl
))
2110 CASE_FLT_FN (BUILT_IN_SIN
):
2111 builtin_optab
= sin_optab
; break;
2112 CASE_FLT_FN (BUILT_IN_COS
):
2113 builtin_optab
= cos_optab
; break;
2118 /* Before working hard, check whether the instruction is available. */
2119 if (optab_handler (builtin_optab
, mode
)->insn_code
!= CODE_FOR_nothing
)
2121 target
= gen_reg_rtx (mode
);
2123 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2124 need to expand the argument again. This way, we will not perform
2125 side-effects more the once. */
2126 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2128 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2132 /* Compute into TARGET.
2133 Set TARGET to wherever the result comes back. */
/* sincos produces two values; pass TARGET in the slot matching the
   requested function (sin result vs. cos result) and 0 for the other.  */
2134 if (builtin_optab
== sincos_optab
)
2138 switch (DECL_FUNCTION_CODE (fndecl
))
2140 CASE_FLT_FN (BUILT_IN_SIN
):
2141 result
= expand_twoval_unop (builtin_optab
, op0
, 0, target
, 0);
2143 CASE_FLT_FN (BUILT_IN_COS
):
2144 result
= expand_twoval_unop (builtin_optab
, op0
, target
, 0, 0);
2149 gcc_assert (result
);
/* Single-value sin/cos optab path.  */
2153 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2158 /* Output the entire sequence. */
2159 insns
= get_insns ();
2165 /* If we were unable to expand via the builtin, stop the sequence
2166 (without outputting the insns) and call to the library function
2167 with the stabilized argument list. */
2171 target
= expand_call (exp
, target
, target
== const0_rtx
);
2176 /* Expand a call to one of the builtin math functions that operate on
2177 floating point argument and output an integer result (ilogb, isinf,
2179 Return 0 if a normal call should be emitted rather than expanding the
2180 function in-line. EXP is the expression that is a call to the builtin
2181 function; if convenient, the result should be placed in TARGET.
2182 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* Expand a builtin with a real argument and an integer result (ilogb,
   isinf, isfinite/finite, isnormal).  Uses an optab when one exists;
   otherwise lowers to comparisons against format extremes built with
   isgreater/islessequal/isgreaterequal; finally falls back to a call.
   NOTE(review): extraction-mangled chunk -- statements split across
   lines, some lines missing (e.g. the ilogb errno early-out body);
   compare with pristine builtins.c before editing.  */
2185 expand_builtin_interclass_mathfn (tree exp
, rtx target
, rtx subtarget
)
2187 optab builtin_optab
= 0;
2188 enum insn_code icode
= CODE_FOR_nothing
;
2190 tree fndecl
= get_callee_fndecl (exp
);
2191 enum machine_mode mode
;
2192 bool errno_set
= false;
2195 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2198 arg
= CALL_EXPR_ARG (exp
, 0);
/* Only ilogb sets errno (EDOM on 0/NaN/Inf); the classification
   builtins never touch errno.  */
2200 switch (DECL_FUNCTION_CODE (fndecl
))
2202 CASE_FLT_FN (BUILT_IN_ILOGB
):
2203 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2204 CASE_FLT_FN (BUILT_IN_ISINF
):
2205 builtin_optab
= isinf_optab
; break;
2206 case BUILT_IN_ISNORMAL
:
2207 case BUILT_IN_ISFINITE
:
2208 CASE_FLT_FN (BUILT_IN_FINITE
):
2209 /* These builtins have no optabs (yet). */
2215 /* There's no easy way to detect the case we need to set EDOM. */
2216 if (flag_errno_math
&& errno_set
)
2219 /* Optab mode depends on the mode of the input argument. */
2220 mode
= TYPE_MODE (TREE_TYPE (arg
));
2223 icode
= optab_handler (builtin_optab
, mode
)->insn_code
;
2225 /* Before working hard, check whether the instruction is available. */
2226 if (icode
!= CODE_FOR_nothing
)
2228 /* Make a suitable register to place result in. */
2230 || GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
2231 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
2233 gcc_assert (insn_data
[icode
].operand
[0].predicate
2234 (target
, GET_MODE (target
)));
2236 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2237 need to expand the argument again. This way, we will not perform
2238 side-effects more the once. */
2239 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2241 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2243 if (mode
!= GET_MODE (op0
))
2244 op0
= convert_to_mode (mode
, op0
, 0);
2246 /* Compute into TARGET.
2247 Set TARGET to wherever the result comes back. */
2248 emit_unop_insn (icode
, target
, op0
, UNKNOWN
);
2252 /* If there is no optab, try generic code. */
2253 switch (DECL_FUNCTION_CODE (fndecl
))
2257 CASE_FLT_FN (BUILT_IN_ISINF
):
2259 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2260 tree
const isgr_fn
= built_in_decls
[BUILT_IN_ISGREATER
];
2261 tree
const type
= TREE_TYPE (arg
);
/* BUF holds the textual maximum finite value of the format;
   R is that value as a REAL_VALUE_TYPE.  */
2265 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
2266 real_from_string (&r
, buf
);
2267 result
= build_call_expr (isgr_fn
, 2,
2268 fold_build1 (ABS_EXPR
, type
, arg
),
2269 build_real (type
, r
));
2270 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
2272 CASE_FLT_FN (BUILT_IN_FINITE
):
2273 case BUILT_IN_ISFINITE
:
2275 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2276 tree
const isle_fn
= built_in_decls
[BUILT_IN_ISLESSEQUAL
];
2277 tree
const type
= TREE_TYPE (arg
);
2281 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
2282 real_from_string (&r
, buf
);
2283 result
= build_call_expr (isle_fn
, 2,
2284 fold_build1 (ABS_EXPR
, type
, arg
),
2285 build_real (type
, r
));
2286 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
2288 case BUILT_IN_ISNORMAL
:
2290 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2291 islessequal(fabs(x),DBL_MAX). */
2292 tree
const isle_fn
= built_in_decls
[BUILT_IN_ISLESSEQUAL
];
2293 tree
const isge_fn
= built_in_decls
[BUILT_IN_ISGREATEREQUAL
];
2294 tree
const type
= TREE_TYPE (arg
);
2295 REAL_VALUE_TYPE rmax
, rmin
;
2298 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
2299 real_from_string (&rmax
, buf
);
/* RMIN is the smallest normalized value: 0x1p(emin-1) in hex-float
   notation for this mode's format.  */
2300 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
2301 real_from_string (&rmin
, buf
);
/* Save fabs(arg) once; it is referenced by both comparisons.  */
2302 arg
= builtin_save_expr (fold_build1 (ABS_EXPR
, type
, arg
));
2303 result
= build_call_expr (isle_fn
, 2, arg
,
2304 build_real (type
, rmax
));
2305 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
2306 build_call_expr (isge_fn
, 2, arg
,
2307 build_real (type
, rmin
)));
2308 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
/* Final fallback: emit an ordinary library call.  */
2314 target
= expand_call (exp
, target
, target
== const0_rtx
);
2319 /* Expand a call to the builtin sincos math function.
2320 Return NULL_RTX if a normal call should be emitted rather than expanding the
2321 function in-line. EXP is the expression that is a call to the builtin
/* Expand a call to the sincos builtin: sincos (x, &sin, &cos).  Uses
   the two-value sincos optab and stores both results through the
   pointer arguments; returns nothing useful when the optab is absent.
   NOTE(review): extraction-mangled chunk -- some original lines (braces,
   returns) are missing; compare with pristine builtins.c before editing.  */
2325 expand_builtin_sincos (tree exp
)
2327 rtx op0
, op1
, op2
, target1
, target2
;
2328 enum machine_mode mode
;
2329 tree arg
, sinp
, cosp
;
2332 if (!validate_arglist (exp
, REAL_TYPE
,
2333 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2336 arg
= CALL_EXPR_ARG (exp
, 0);
2337 sinp
= CALL_EXPR_ARG (exp
, 1);
2338 cosp
= CALL_EXPR_ARG (exp
, 2);
2340 /* Make a suitable register to place result in. */
2341 mode
= TYPE_MODE (TREE_TYPE (arg
));
2343 /* Check if sincos insn is available, otherwise emit the call. */
2344 if (optab_handler (sincos_optab
, mode
)->insn_code
== CODE_FOR_nothing
)
2347 target1
= gen_reg_rtx (mode
);
2348 target2
= gen_reg_rtx (mode
);
2350 op0
= expand_normal (arg
);
/* OP1/OP2 are the memory locations *sinp and *cosp.  */
2351 op1
= expand_normal (build_fold_indirect_ref (sinp
));
2352 op2
= expand_normal (build_fold_indirect_ref (cosp
));
2354 /* Compute into target1 and target2.
2355 Set TARGET to wherever the result comes back. */
2356 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2357 gcc_assert (result
);
2359 /* Move target1 and target2 to the memory locations indicated
2361 emit_move_insn (op1
, target1
);
2362 emit_move_insn (op2
, target2
);
2367 /* Expand a call to the internal cexpi builtin to the sincos math function.
2368 EXP is the expression that is a call to the builtin function; if convenient,
2369 the result should be placed in TARGET. SUBTARGET may be used as the target
2370 for computing one of EXP's operands. */
/* Expand the internal cexpi builtin (cos(x) + i*sin(x)).  Strategy:
   (1) the sincos optab if available, (2) a libcall to sincos when the
   target has it, (3) a libcall to cexp, creating a decl for cexp on
   the fly if the target lacks one.
   NOTE(review): extraction-mangled chunk -- statements split across
   lines and some lines (e.g. the literal cexp function names assigned
   in the name-selection branches) are missing; compare with pristine
   builtins.c before editing.  */
2373 expand_builtin_cexpi (tree exp
, rtx target
, rtx subtarget
)
2375 tree fndecl
= get_callee_fndecl (exp
);
2377 enum machine_mode mode
;
2380 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2383 arg
= CALL_EXPR_ARG (exp
, 0);
2384 type
= TREE_TYPE (arg
);
2385 mode
= TYPE_MODE (TREE_TYPE (arg
));
2387 /* Try expanding via a sincos optab, fall back to emitting a libcall
2388 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2389 is only generated from sincos, cexp or if we have either of them. */
2390 if (optab_handler (sincos_optab
, mode
)->insn_code
!= CODE_FOR_nothing
)
2392 op1
= gen_reg_rtx (mode
);
2393 op2
= gen_reg_rtx (mode
);
2395 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2397 /* Compute into op1 and op2. */
2398 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2400 else if (TARGET_HAS_SINCOS
)
2402 tree call
, fn
= NULL_TREE
;
/* Pick the sincos variant matching the cexpi precision.  */
2406 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2407 fn
= built_in_decls
[BUILT_IN_SINCOSF
];
2408 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2409 fn
= built_in_decls
[BUILT_IN_SINCOS
];
2410 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2411 fn
= built_in_decls
[BUILT_IN_SINCOSL
];
/* Stack temporaries receive sin and cos; their addresses are passed
   to the sincos libcall as trees TOP1/TOP2.  */
2415 op1
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2416 op2
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2417 op1a
= copy_to_mode_reg (Pmode
, XEXP (op1
, 0));
2418 op2a
= copy_to_mode_reg (Pmode
, XEXP (op2
, 0));
2419 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2420 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2422 /* Make sure not to fold the sincos call again. */
2423 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2424 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2425 call
, 3, arg
, top1
, top2
));
/* Last resort: lower cexpi(x) to cexp(0 + i*x).  */
2429 tree call
, fn
= NULL_TREE
, narg
;
2430 tree ctype
= build_complex_type (type
);
2432 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2433 fn
= built_in_decls
[BUILT_IN_CEXPF
];
2434 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2435 fn
= built_in_decls
[BUILT_IN_CEXP
];
2436 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2437 fn
= built_in_decls
[BUILT_IN_CEXPL
];
2441 /* If we don't have a decl for cexp create one. This is the
2442 friendliest fallback if the user calls __builtin_cexpi
2443 without full target C99 function support. */
2444 if (fn
== NULL_TREE
)
2447 const char *name
= NULL
;
/* NOTE(review): the assignments of the "cexpf"/"cexp"/"cexpl"
   string literals to NAME appear to have been dropped by the
   extraction; only the selector conditions remain.  */
2449 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2451 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2453 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2456 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2457 fn
= build_fn_decl (name
, fntype
);
2460 narg
= fold_build2 (COMPLEX_EXPR
, ctype
,
2461 build_real (type
, dconst0
), arg
);
2463 /* Make sure not to fold the cexp call again. */
2464 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2465 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2466 target
, VOIDmode
, EXPAND_NORMAL
);
2469 /* Now build the proper return type. */
2470 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2471 make_tree (TREE_TYPE (arg
), op2
),
2472 make_tree (TREE_TYPE (arg
), op1
)),
2473 target
, VOIDmode
, EXPAND_NORMAL
);
2476 /* Expand a call to one of the builtin rounding functions gcc defines
2477 as an extension (lfloor and lceil). As these are gcc extensions we
2478 do not need to worry about setting errno to EDOM.
2479 If expanding via optab fails, lower expression to (int)(floor(x)).
2480 EXP is the expression that is a call to the builtin function;
2481 if convenient, the result should be placed in TARGET. */
/* Expand lceil/llceil/lfloor/llfloor: FP argument, integer result.
   First tries the lceil/lfloor convert-optab; on failure lowers to
   (int) ceil(x) / (int) floor(x), synthesizing a floor/ceil decl for
   non-C99 targets if needed.  No errno concern -- these are GCC
   extensions.
   NOTE(review): extraction-mangled chunk -- statements split across
   lines and the literal function-name assignments in the fallback
   switch have been dropped; compare with pristine builtins.c.  */
2484 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2486 convert_optab builtin_optab
;
2487 rtx op0
, insns
, tmp
;
2488 tree fndecl
= get_callee_fndecl (exp
);
2489 enum built_in_function fallback_fn
;
2490 tree fallback_fndecl
;
2491 enum machine_mode mode
;
2494 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2497 arg
= CALL_EXPR_ARG (exp
, 0);
2499 switch (DECL_FUNCTION_CODE (fndecl
))
2501 CASE_FLT_FN (BUILT_IN_LCEIL
):
2502 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2503 builtin_optab
= lceil_optab
;
2504 fallback_fn
= BUILT_IN_CEIL
;
2507 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2508 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2509 builtin_optab
= lfloor_optab
;
2510 fallback_fn
= BUILT_IN_FLOOR
;
2517 /* Make a suitable register to place result in. */
2518 mode
= TYPE_MODE (TREE_TYPE (exp
));
2520 target
= gen_reg_rtx (mode
);
2522 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2523 need to expand the argument again. This way, we will not perform
2524 side-effects more the once. */
2525 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2527 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2531 /* Compute into TARGET. */
2532 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2534 /* Output the entire sequence. */
2535 insns
= get_insns ();
2541 /* If we were unable to expand via the builtin, stop the sequence
2542 (without outputting the insns). */
2545 /* Fall back to floating point rounding optab. */
2546 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2548 /* For non-C99 targets we may end up without a fallback fndecl here
2549 if the user called __builtin_lfloor directly. In this case emit
2550 a call to the floor/ceil variants nevertheless. This should result
2551 in the best user experience for not full C99 targets. */
2552 if (fallback_fndecl
== NULL_TREE
)
2555 const char *name
= NULL
;
/* NOTE(review): the "ceil"/"ceilf"/"ceill"/"floor"/... string
   assignments that follow each case group are missing from this
   extraction; only the case labels survive.  */
2557 switch (DECL_FUNCTION_CODE (fndecl
))
2559 case BUILT_IN_LCEIL
:
2560 case BUILT_IN_LLCEIL
:
2563 case BUILT_IN_LCEILF
:
2564 case BUILT_IN_LLCEILF
:
2567 case BUILT_IN_LCEILL
:
2568 case BUILT_IN_LLCEILL
:
2571 case BUILT_IN_LFLOOR
:
2572 case BUILT_IN_LLFLOOR
:
2575 case BUILT_IN_LFLOORF
:
2576 case BUILT_IN_LLFLOORF
:
2579 case BUILT_IN_LFLOORL
:
2580 case BUILT_IN_LLFLOORL
:
2587 fntype
= build_function_type_list (TREE_TYPE (arg
),
2588 TREE_TYPE (arg
), NULL_TREE
);
2589 fallback_fndecl
= build_fn_decl (name
, fntype
);
/* Rewrite the call as e.g. floor(arg) and expand that instead.  */
2592 exp
= build_call_expr (fallback_fndecl
, 1, arg
);
2594 tmp
= expand_normal (exp
);
2596 /* Truncate the result of floating point optab to integer
2597 via expand_fix (). */
2598 target
= gen_reg_rtx (mode
);
2599 expand_fix (target
, tmp
, 0);
2604 /* Expand a call to one of the builtin math functions doing integer
2606 Return 0 if a normal call should be emitted rather than expanding the
2607 function in-line. EXP is the expression that is a call to the builtin
2608 function; if convenient, the result should be placed in TARGET. */
/* Expand lrint/llrint/lround/llround via their convert-optabs.  These
   can set errno (EDOM on out-of-range), so when -fmath-errno is on we
   punt to the library (there is no cheap way to detect the EDOM case).
   NOTE(review): extraction-mangled chunk -- some lines (returns,
   end_sequence, braces) are missing; compare with pristine builtins.c.  */
2611 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2613 convert_optab builtin_optab
;
2615 tree fndecl
= get_callee_fndecl (exp
);
2617 enum machine_mode mode
;
2619 /* There's no easy way to detect the case we need to set EDOM. */
2620 if (flag_errno_math
)
2623 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2626 arg
= CALL_EXPR_ARG (exp
, 0);
2628 switch (DECL_FUNCTION_CODE (fndecl
))
2630 CASE_FLT_FN (BUILT_IN_LRINT
):
2631 CASE_FLT_FN (BUILT_IN_LLRINT
):
2632 builtin_optab
= lrint_optab
; break;
2633 CASE_FLT_FN (BUILT_IN_LROUND
):
2634 CASE_FLT_FN (BUILT_IN_LLROUND
):
2635 builtin_optab
= lround_optab
; break;
2640 /* Make a suitable register to place result in. */
2641 mode
= TYPE_MODE (TREE_TYPE (exp
));
2643 target
= gen_reg_rtx (mode
);
2645 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2646 need to expand the argument again. This way, we will not perform
2647 side-effects more the once. */
2648 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2650 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2654 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2656 /* Output the entire sequence. */
2657 insns
= get_insns ();
2663 /* If we were unable to expand via the builtin, stop the sequence
2664 (without outputting the insns) and call to the library function
2665 with the stabilized argument list. */
2668 target
= expand_call (exp
, target
, target
== const0_rtx
);
2673 /* To evaluate powi(x,n), the floating point value x raised to the
2674 constant integer exponent n, we use a hybrid algorithm that
2675 combines the "window method" with look-up tables. For an
2676 introduction to exponentiation algorithms and "addition chains",
2677 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2678 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2679 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2680 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2682 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2683 multiplications to inline before calling the system library's pow
2684 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2685 so this default never requires calling pow, powf or powl. */
2687 #ifndef POWI_MAX_MULTS
2688 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2691 /* The size of the "optimal power tree" lookup table. All
2692 exponents less than this value are simply looked up in the
2693 powi_table below. This threshold is also used to size the
2694 cache of pseudo registers that hold intermediate results. */
2695 #define POWI_TABLE_SIZE 256
2697 /* The size, in bits of the window, used in the "window method"
2698 exponentiation algorithm. This is equivalent to a radix of
2699 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2700 #define POWI_WINDOW_SIZE 3
2702 /* The following table is an efficient representation of an
2703 "optimal power tree". For each value, i, the corresponding
2704 value, j, in the table states than an optimal evaluation
2705 sequence for calculating pow(x,i) can be found by evaluating
2706 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2707 100 integers is given in Knuth's "Seminumerical algorithms". */
/* Optimal-power-tree lookup table for expanding powi(x,n): for each
   index i, the entry j states that pow(x,i) is best computed as
   pow(x,j) * pow(x,i-j).  See the narrative comment above.
   NOTE(review): the opening/closing braces of this initializer were
   dropped by the extraction; only the rows survive.  */
2709 static const unsigned char powi_table
[POWI_TABLE_SIZE
] =
2711 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2712 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2713 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2714 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2715 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2716 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2717 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2718 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2719 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2720 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2721 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2722 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2723 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2724 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2725 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2726 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2727 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2728 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2729 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2730 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2731 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2732 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2733 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2734 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2735 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2736 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2737 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2738 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2739 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2740 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2741 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2742 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2746 /* Return the number of multiplications required to calculate
2747 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2748 subroutine of powi_cost. CACHE is an array indicating
2749 which exponents have already been calculated. */
/* Count the multiplications needed for powi(x,n) with n below
   POWI_TABLE_SIZE, recursing through the power-tree split recorded in
   powi_table.  CACHE marks exponents already computed (cost 0).
   NOTE(review): the cached-hit early return and the cache[n] = true
   update are missing from this extraction; only the recursive sum
   survives.  */
2752 powi_lookup_cost (unsigned HOST_WIDE_INT n
, bool *cache
)
2754 /* If we've already calculated this exponent, then this evaluation
2755 doesn't require any additional multiplications. */
/* Cost = cost of the two power-tree halves plus one multiply to
   combine them.  */
2760 return powi_lookup_cost (n
- powi_table
[n
], cache
)
2761 + powi_lookup_cost (powi_table
[n
], cache
) + 1;
2764 /* Return the number of multiplications required to calculate
2765 powi(x,n) for an arbitrary x, given the exponent N. This
2766 function needs to be kept in sync with expand_powi below. */
/* Return the number of multiplications required to compute powi(x,n)
   for arbitrary x.  Must be kept in sync with expand_powi/expand_powi_1.
   NOTE(review): extraction-mangled -- the n == 0 early return, the
   RESULT initialization, and the even-value squaring branch of the
   window loop appear to be missing; compare with pristine builtins.c.  */
2769 powi_cost (HOST_WIDE_INT n
)
2771 bool cache
[POWI_TABLE_SIZE
];
2772 unsigned HOST_WIDE_INT digit
;
2773 unsigned HOST_WIDE_INT val
;
2779 /* Ignore the reciprocal when calculating the cost. */
2780 val
= (n
< 0) ? -n
: n
;
2782 /* Initialize the exponent cache. */
2783 memset (cache
, 0, POWI_TABLE_SIZE
* sizeof (bool));
/* Window method: peel POWI_WINDOW_SIZE bits at a time until the
   remaining exponent fits in the lookup table.  */
2788 while (val
>= POWI_TABLE_SIZE
)
2792 digit
= val
& ((1 << POWI_WINDOW_SIZE
) - 1);
2793 result
+= powi_lookup_cost (digit
, cache
)
2794 + POWI_WINDOW_SIZE
+ 1;
2795 val
>>= POWI_WINDOW_SIZE
;
2804 return result
+ powi_lookup_cost (val
, cache
);
2807 /* Recursive subroutine of expand_powi. This function takes the array,
2808 CACHE, of already calculated exponents and an exponent N and returns
2809 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
/* Recursive worker for expand_powi: return an rtx computing
   CACHE[1]**N in mode MODE, memoizing sub-results in CACHE for
   exponents below POWI_TABLE_SIZE.
   NOTE(review): extraction-mangled -- the cache-hit fast path, the
   branch selection between the table/window/halving strategies, and
   the final return appear to be missing; compare with pristine
   builtins.c before editing.  */
2812 expand_powi_1 (enum machine_mode mode
, unsigned HOST_WIDE_INT n
, rtx
*cache
)
2814 unsigned HOST_WIDE_INT digit
;
/* Small exponents: split per the optimal power tree in powi_table.  */
2818 if (n
< POWI_TABLE_SIZE
)
2823 target
= gen_reg_rtx (mode
);
2826 op0
= expand_powi_1 (mode
, n
- powi_table
[n
], cache
);
2827 op1
= expand_powi_1 (mode
, powi_table
[n
], cache
);
/* Window method branch: peel the low POWI_WINDOW_SIZE bits.  */
2831 target
= gen_reg_rtx (mode
);
2832 digit
= n
& ((1 << POWI_WINDOW_SIZE
) - 1);
2833 op0
= expand_powi_1 (mode
, n
- digit
, cache
);
2834 op1
= expand_powi_1 (mode
, digit
, cache
);
/* Even-exponent branch: square x**(n/2).  */
2838 target
= gen_reg_rtx (mode
);
2839 op0
= expand_powi_1 (mode
, n
>> 1, cache
);
2843 result
= expand_mult (mode
, op0
, op1
, target
, 0);
2844 if (result
!= target
)
2845 emit_move_insn (target
, result
);
2849 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2850 floating point operand in mode MODE, and N is the exponent. This
2851 function needs to be kept in sync with powi_cost above. */
/* Emit RTL computing powi(x,n): X**N in mode MODE for integer N,
   using the cached power-tree expansion; a negative N reciprocates
   the positive-power result.  Must stay in sync with powi_cost.
   NOTE(review): extraction-mangled -- the n == 0 guard before the
   CONST1_RTX return, the cache[1] = x seeding, the n < 0 test before
   the reciprocal, and the final return are missing from this view.  */
2854 expand_powi (rtx x
, enum machine_mode mode
, HOST_WIDE_INT n
)
2856 unsigned HOST_WIDE_INT val
;
2857 rtx cache
[POWI_TABLE_SIZE
];
/* x**0 == 1 regardless of x.  */
2861 return CONST1_RTX (mode
);
2863 val
= (n
< 0) ? -n
: n
;
2865 memset (cache
, 0, sizeof (cache
));
2868 result
= expand_powi_1 (mode
, (n
< 0) ? -n
: n
, cache
);
2870 /* If the original exponent was negative, reciprocate the result. */
2872 result
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2873 result
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
2878 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2879 a normal call should be emitted rather than expanding the function
2880 in-line. EXP is the expression that is a call to the builtin
2881 function; if convenient, the result should be placed in TARGET. */
/* Expand pow(x,y).  Special-cases a constant exponent: integer-valued
   exponents become a multiplication chain (expand_powi); half-integers
   become sqrt(x) * x**(n/2); third-integers become cbrt-based products
   (both only under -funsafe-math-optimizations constraints).  Anything
   else falls through to the generic optab path expand_builtin_mathfn_2.
   NOTE(review): extraction-mangled -- several guard conditions and
   returns are missing from this view; compare with pristine builtins.c
   before editing.  */
2884 expand_builtin_pow (tree exp
, rtx target
, rtx subtarget
)
2888 tree type
= TREE_TYPE (exp
);
2889 REAL_VALUE_TYPE cint
, c
, c2
;
2892 enum machine_mode mode
= TYPE_MODE (type
);
2894 if (! validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2897 arg0
= CALL_EXPR_ARG (exp
, 0);
2898 arg1
= CALL_EXPR_ARG (exp
, 1);
/* Non-constant exponent: use the generic two-operand expander.  */
2900 if (TREE_CODE (arg1
) != REAL_CST
2901 || TREE_OVERFLOW (arg1
))
2902 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
2904 /* Handle constant exponents. */
2906 /* For integer valued exponents we can expand to an optimal multiplication
2907 sequence using expand_powi. */
2908 c
= TREE_REAL_CST (arg1
);
2909 n
= real_to_integer (&c
);
2910 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
/* Exponents -1..2 are always safe to expand; larger ones require
   unsafe-math and a cheap-enough multiplication count.  */
2911 if (real_identical (&c
, &cint
)
2912 && ((n
>= -1 && n
<= 2)
2913 || (flag_unsafe_math_optimizations
2915 && powi_cost (n
) <= POWI_MAX_MULTS
)))
2917 op
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2920 op
= force_reg (mode
, op
);
2921 op
= expand_powi (op
, mode
, n
);
/* Stabilize arg0: it is expanded more than once below.  */
2926 narg0
= builtin_save_expr (arg0
);
2928 /* If the exponent is not integer valued, check if it is half of an integer.
2929 In this case we can expand to sqrt (x) * x**(n/2). */
2930 fn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
2931 if (fn
!= NULL_TREE
)
/* C2 = 2*c; if C2 is an integer N, the exponent is N/2.  */
2933 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst2
);
2934 n
= real_to_integer (&c2
);
2935 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2936 if (real_identical (&c2
, &cint
)
2937 && ((flag_unsafe_math_optimizations
2939 && powi_cost (n
/2) <= POWI_MAX_MULTS
)
2942 tree call_expr
= build_call_expr (fn
, 1, narg0
);
2943 /* Use expand_expr in case the newly built call expression
2944 was folded to a non-call. */
2945 op
= expand_expr (call_expr
, subtarget
, mode
, EXPAND_NORMAL
);
/* Multiply sqrt(x) by x**|n/2|.  */
2948 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2949 op2
= force_reg (mode
, op2
);
2950 op2
= expand_powi (op2
, mode
, abs (n
/ 2));
2951 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
2952 0, OPTAB_LIB_WIDEN
);
2953 /* If the original exponent was negative, reciprocate the
2956 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2957 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
2963 /* Try if the exponent is a third of an integer. In this case
2964 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2965 different from pow (x, 1./3.) due to rounding and behavior
2966 with negative x we need to constrain this transformation to
2967 unsafe math and positive x or finite math. */
2968 fn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
2970 && flag_unsafe_math_optimizations
2971 && (tree_expr_nonnegative_p (arg0
)
2972 || !HONOR_NANS (mode
)))
2974 REAL_VALUE_TYPE dconst3
;
/* Check that round(3*c)/3 reproduces c exactly in this mode,
   i.e. the exponent really is a third of an integer.  */
2975 real_from_integer (&dconst3
, VOIDmode
, 3, 0, 0);
2976 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst3
);
2977 real_round (&c2
, mode
, &c2
);
2978 n
= real_to_integer (&c2
);
2979 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2980 real_arithmetic (&c2
, RDIV_EXPR
, &cint
, &dconst3
);
2981 real_convert (&c2
, mode
, &c2
);
2982 if (real_identical (&c2
, &c
)
2984 && powi_cost (n
/3) <= POWI_MAX_MULTS
)
2987 tree call_expr
= build_call_expr (fn
, 1,narg0
);
2988 op
= expand_builtin (call_expr
, NULL_RTX
, subtarget
, mode
, 0);
/* When |n| % 3 == 2 we need cbrt(x) squared.  */
2989 if (abs (n
) % 3 == 2)
2990 op
= expand_simple_binop (mode
, MULT
, op
, op
, op
,
2991 0, OPTAB_LIB_WIDEN
);
2994 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2995 op2
= force_reg (mode
, op2
);
2996 op2
= expand_powi (op2
, mode
, abs (n
/ 3));
2997 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
2998 0, OPTAB_LIB_WIDEN
);
2999 /* If the original exponent was negative, reciprocate the
3002 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
3003 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
3009 /* Fall back to optab expansion. */
3010 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
3013 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3014 a normal call should be emitted rather than expanding the function
3015 in-line. EXP is the expression that is a call to the builtin
3016 function; if convenient, the result should be placed in TARGET. */
/* Expand __builtin_powi(x,n).  A constant, cheap-enough exponent is
   expanded to a multiplication chain via expand_powi; otherwise a
   libgcc __powi* libcall is emitted.
   NOTE(review): extraction-mangled -- some guard lines (e.g. the
   flag_unsafe_math_optimizations term of the cost condition) and the
   final return appear to be missing from this view.  */
3019 expand_builtin_powi (tree exp
, rtx target
, rtx subtarget
)
3023 enum machine_mode mode
;
3024 enum machine_mode mode2
;
3026 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3029 arg0
= CALL_EXPR_ARG (exp
, 0);
3030 arg1
= CALL_EXPR_ARG (exp
, 1);
3031 mode
= TYPE_MODE (TREE_TYPE (exp
));
3033 /* Handle constant power. */
3035 if (TREE_CODE (arg1
) == INTEGER_CST
3036 && !TREE_OVERFLOW (arg1
))
3038 HOST_WIDE_INT n
= TREE_INT_CST_LOW (arg1
);
3040 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3041 Otherwise, check the number of multiplications required. */
/* The TREE_INT_CST_HIGH test ensures the constant fits in a
   HOST_WIDE_INT (non-negative or sign-extended negative).  */
3042 if ((TREE_INT_CST_HIGH (arg1
) == 0
3043 || TREE_INT_CST_HIGH (arg1
) == -1)
3044 && ((n
>= -1 && n
<= 2)
3046 && powi_cost (n
) <= POWI_MAX_MULTS
)))
3048 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
3049 op0
= force_reg (mode
, op0
);
3050 return expand_powi (op0
, mode
, n
);
3054 /* Emit a libcall to libgcc. */
3056 /* Mode of the 2nd argument must match that of an int. */
3057 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
3059 if (target
== NULL_RTX
)
3060 target
= gen_reg_rtx (mode
);
3062 op0
= expand_expr (arg0
, subtarget
, mode
, EXPAND_NORMAL
);
3063 if (GET_MODE (op0
) != mode
)
3064 op0
= convert_to_mode (mode
, op0
, 0);
3065 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
3066 if (GET_MODE (op1
) != mode2
)
3067 op1
= convert_to_mode (mode2
, op1
, 0);
3069 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
3070 target
, LCT_CONST
, mode
, 2,
3071 op0
, mode
, op1
, mode2
);
3076 /* Expand expression EXP which is a call to the strlen builtin. Return
3077 NULL_RTX if we failed the caller should emit a normal call, otherwise
3078 try to get the result in TARGET, if convenient. */
3081 expand_builtin_strlen (tree exp
, rtx target
,
3082 enum machine_mode target_mode
)
3084 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
3090 tree src
= CALL_EXPR_ARG (exp
, 0);
3091 rtx result
, src_reg
, char_rtx
, before_strlen
;
3092 enum machine_mode insn_mode
= target_mode
, char_mode
;
3093 enum insn_code icode
= CODE_FOR_nothing
;
3096 /* If the length can be computed at compile-time, return it. */
3097 len
= c_strlen (src
, 0);
3099 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3101 /* If the length can be computed at compile-time and is constant
3102 integer, but there are side-effects in src, evaluate
3103 src for side-effects, then return len.
3104 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3105 can be optimized into: i++; x = 3; */
3106 len
= c_strlen (src
, 1);
3107 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
3109 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3110 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3113 align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3115 /* If SRC is not a pointer type, don't do this operation inline. */
3119 /* Bail out if we can't compute strlen in the right mode. */
3120 while (insn_mode
!= VOIDmode
)
3122 icode
= optab_handler (strlen_optab
, insn_mode
)->insn_code
;
3123 if (icode
!= CODE_FOR_nothing
)
3126 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3128 if (insn_mode
== VOIDmode
)
3131 /* Make a place to write the result of the instruction. */
3135 && GET_MODE (result
) == insn_mode
3136 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3137 result
= gen_reg_rtx (insn_mode
);
3139 /* Make a place to hold the source address. We will not expand
3140 the actual source until we are sure that the expansion will
3141 not fail -- there are trees that cannot be expanded twice. */
3142 src_reg
= gen_reg_rtx (Pmode
);
3144 /* Mark the beginning of the strlen sequence so we can emit the
3145 source operand later. */
3146 before_strlen
= get_last_insn ();
3148 char_rtx
= const0_rtx
;
3149 char_mode
= insn_data
[(int) icode
].operand
[2].mode
;
3150 if (! (*insn_data
[(int) icode
].operand
[2].predicate
) (char_rtx
,
3152 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
3154 pat
= GEN_FCN (icode
) (result
, gen_rtx_MEM (BLKmode
, src_reg
),
3155 char_rtx
, GEN_INT (align
));
3160 /* Now that we are assured of success, expand the source. */
3162 pat
= expand_expr (src
, src_reg
, ptr_mode
, EXPAND_NORMAL
);
3164 emit_move_insn (src_reg
, pat
);
3169 emit_insn_after (pat
, before_strlen
);
3171 emit_insn_before (pat
, get_insns ());
3173 /* Return the value in the proper mode for this function. */
3174 if (GET_MODE (result
) == target_mode
)
3176 else if (target
!= 0)
3177 convert_move (target
, result
, 0);
3179 target
= convert_to_mode (target_mode
, result
, 0);
3185 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3186 caller should emit a normal call, otherwise try to get the result
3187 in TARGET, if convenient (and in mode MODE if that's convenient). */
3190 expand_builtin_strstr (tree exp
, rtx target
, enum machine_mode mode
)
3192 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3194 tree type
= TREE_TYPE (exp
);
3195 tree result
= fold_builtin_strstr (CALL_EXPR_ARG (exp
, 0),
3196 CALL_EXPR_ARG (exp
, 1), type
);
3198 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3203 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3204 caller should emit a normal call, otherwise try to get the result
3205 in TARGET, if convenient (and in mode MODE if that's convenient). */
3208 expand_builtin_strchr (tree exp
, rtx target
, enum machine_mode mode
)
3210 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3212 tree type
= TREE_TYPE (exp
);
3213 tree result
= fold_builtin_strchr (CALL_EXPR_ARG (exp
, 0),
3214 CALL_EXPR_ARG (exp
, 1), type
);
3216 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3218 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3223 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3224 caller should emit a normal call, otherwise try to get the result
3225 in TARGET, if convenient (and in mode MODE if that's convenient). */
3228 expand_builtin_strrchr (tree exp
, rtx target
, enum machine_mode mode
)
3230 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3232 tree type
= TREE_TYPE (exp
);
3233 tree result
= fold_builtin_strrchr (CALL_EXPR_ARG (exp
, 0),
3234 CALL_EXPR_ARG (exp
, 1), type
);
3236 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3241 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3242 caller should emit a normal call, otherwise try to get the result
3243 in TARGET, if convenient (and in mode MODE if that's convenient). */
3246 expand_builtin_strpbrk (tree exp
, rtx target
, enum machine_mode mode
)
3248 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3250 tree type
= TREE_TYPE (exp
);
3251 tree result
= fold_builtin_strpbrk (CALL_EXPR_ARG (exp
, 0),
3252 CALL_EXPR_ARG (exp
, 1), type
);
3254 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3259 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3260 bytes from constant string DATA + OFFSET and return it as target
3264 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3265 enum machine_mode mode
)
3267 const char *str
= (const char *) data
;
3269 gcc_assert (offset
>= 0
3270 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3271 <= strlen (str
) + 1));
3273 return c_readstr (str
+ offset
, mode
);
3276 /* Expand a call EXP to the memcpy builtin.
3277 Return NULL_RTX if we failed, the caller should emit a normal call,
3278 otherwise try to get the result in TARGET, if convenient (and in
3279 mode MODE if that's convenient). */
3282 expand_builtin_memcpy (tree exp
, rtx target
, enum machine_mode mode
)
3284 tree fndecl
= get_callee_fndecl (exp
);
3286 if (!validate_arglist (exp
,
3287 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3291 tree dest
= CALL_EXPR_ARG (exp
, 0);
3292 tree src
= CALL_EXPR_ARG (exp
, 1);
3293 tree len
= CALL_EXPR_ARG (exp
, 2);
3294 const char *src_str
;
3295 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3296 unsigned int dest_align
3297 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3298 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3299 tree result
= fold_builtin_memory_op (dest
, src
, len
,
3300 TREE_TYPE (TREE_TYPE (fndecl
)),
3302 HOST_WIDE_INT expected_size
= -1;
3303 unsigned int expected_align
= 0;
3304 tree_ann_common_t ann
;
3308 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3310 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3312 result
= TREE_OPERAND (result
, 1);
3314 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3317 /* If DEST is not a pointer type, call the normal function. */
3318 if (dest_align
== 0)
3321 /* If either SRC is not a pointer type, don't do this
3322 operation in-line. */
3326 ann
= tree_common_ann (exp
);
3328 stringop_block_profile (ann
->stmt
, &expected_align
, &expected_size
);
3330 if (expected_align
< dest_align
)
3331 expected_align
= dest_align
;
3332 dest_mem
= get_memory_rtx (dest
, len
);
3333 set_mem_align (dest_mem
, dest_align
);
3334 len_rtx
= expand_normal (len
);
3335 src_str
= c_getstr (src
);
3337 /* If SRC is a string constant and block move would be done
3338 by pieces, we can avoid loading the string from memory
3339 and only stored the computed constants. */
3341 && GET_CODE (len_rtx
) == CONST_INT
3342 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3343 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3344 CONST_CAST (char *, src_str
),
3347 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3348 builtin_memcpy_read_str
,
3349 CONST_CAST (char *, src_str
),
3350 dest_align
, false, 0);
3351 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3352 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3356 src_mem
= get_memory_rtx (src
, len
);
3357 set_mem_align (src_mem
, src_align
);
3359 /* Copy word part most expediently. */
3360 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3361 CALL_EXPR_TAILCALL (exp
)
3362 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3363 expected_align
, expected_size
);
3367 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3368 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3374 /* Expand a call EXP to the mempcpy builtin.
3375 Return NULL_RTX if we failed; the caller should emit a normal call,
3376 otherwise try to get the result in TARGET, if convenient (and in
3377 mode MODE if that's convenient). If ENDP is 0 return the
3378 destination pointer, if ENDP is 1 return the end pointer ala
3379 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3383 expand_builtin_mempcpy(tree exp
, rtx target
, enum machine_mode mode
)
3385 if (!validate_arglist (exp
,
3386 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3390 tree dest
= CALL_EXPR_ARG (exp
, 0);
3391 tree src
= CALL_EXPR_ARG (exp
, 1);
3392 tree len
= CALL_EXPR_ARG (exp
, 2);
3393 return expand_builtin_mempcpy_args (dest
, src
, len
,
3395 target
, mode
, /*endp=*/ 1);
3399 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3400 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3401 so that this can also be called without constructing an actual CALL_EXPR.
3402 TYPE is the return type of the call. The other arguments and return value
3403 are the same as for expand_builtin_mempcpy. */
3406 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
, tree type
,
3407 rtx target
, enum machine_mode mode
, int endp
)
3409 /* If return value is ignored, transform mempcpy into memcpy. */
3410 if (target
== const0_rtx
)
3412 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
3417 return expand_expr (build_call_expr (fn
, 3, dest
, src
, len
),
3418 target
, mode
, EXPAND_NORMAL
);
3422 const char *src_str
;
3423 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3424 unsigned int dest_align
3425 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3426 rtx dest_mem
, src_mem
, len_rtx
;
3427 tree result
= fold_builtin_memory_op (dest
, src
, len
, type
, false, endp
);
3431 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3433 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3435 result
= TREE_OPERAND (result
, 1);
3437 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3440 /* If either SRC or DEST is not a pointer type, don't do this
3441 operation in-line. */
3442 if (dest_align
== 0 || src_align
== 0)
3445 /* If LEN is not constant, call the normal function. */
3446 if (! host_integerp (len
, 1))
3449 len_rtx
= expand_normal (len
);
3450 src_str
= c_getstr (src
);
3452 /* If SRC is a string constant and block move would be done
3453 by pieces, we can avoid loading the string from memory
3454 and only stored the computed constants. */
3456 && GET_CODE (len_rtx
) == CONST_INT
3457 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3458 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3459 CONST_CAST (char *, src_str
),
3462 dest_mem
= get_memory_rtx (dest
, len
);
3463 set_mem_align (dest_mem
, dest_align
);
3464 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3465 builtin_memcpy_read_str
,
3466 CONST_CAST (char *, src_str
),
3467 dest_align
, false, endp
);
3468 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3469 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3473 if (GET_CODE (len_rtx
) == CONST_INT
3474 && can_move_by_pieces (INTVAL (len_rtx
),
3475 MIN (dest_align
, src_align
)))
3477 dest_mem
= get_memory_rtx (dest
, len
);
3478 set_mem_align (dest_mem
, dest_align
);
3479 src_mem
= get_memory_rtx (src
, len
);
3480 set_mem_align (src_mem
, src_align
);
3481 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3482 MIN (dest_align
, src_align
), endp
);
3483 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3484 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3492 /* Expand expression EXP, which is a call to the memmove builtin. Return
3493 NULL_RTX if we failed; the caller should emit a normal call. */
3496 expand_builtin_memmove (tree exp
, rtx target
, enum machine_mode mode
, int ignore
)
3498 if (!validate_arglist (exp
,
3499 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3503 tree dest
= CALL_EXPR_ARG (exp
, 0);
3504 tree src
= CALL_EXPR_ARG (exp
, 1);
3505 tree len
= CALL_EXPR_ARG (exp
, 2);
3506 return expand_builtin_memmove_args (dest
, src
, len
, TREE_TYPE (exp
),
3507 target
, mode
, ignore
);
3511 /* Helper function to do the actual work for expand_builtin_memmove. The
3512 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3513 so that this can also be called without constructing an actual CALL_EXPR.
3514 TYPE is the return type of the call. The other arguments and return value
3515 are the same as for expand_builtin_memmove. */
3518 expand_builtin_memmove_args (tree dest
, tree src
, tree len
,
3519 tree type
, rtx target
, enum machine_mode mode
,
3522 tree result
= fold_builtin_memory_op (dest
, src
, len
, type
, ignore
, /*endp=*/3);
3526 STRIP_TYPE_NOPS (result
);
3527 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3529 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3531 result
= TREE_OPERAND (result
, 1);
3533 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3536 /* Otherwise, call the normal function. */
3540 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3541 NULL_RTX if we failed the caller should emit a normal call. */
3544 expand_builtin_bcopy (tree exp
, int ignore
)
3546 tree type
= TREE_TYPE (exp
);
3547 tree src
, dest
, size
;
3549 if (!validate_arglist (exp
,
3550 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3553 src
= CALL_EXPR_ARG (exp
, 0);
3554 dest
= CALL_EXPR_ARG (exp
, 1);
3555 size
= CALL_EXPR_ARG (exp
, 2);
3557 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3558 This is done this way so that if it isn't expanded inline, we fall
3559 back to calling bcopy instead of memmove. */
3560 return expand_builtin_memmove_args (dest
, src
,
3561 fold_convert (sizetype
, size
),
3562 type
, const0_rtx
, VOIDmode
,
3567 # define HAVE_movstr 0
3568 # define CODE_FOR_movstr CODE_FOR_nothing
3571 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3572 we failed, the caller should emit a normal call, otherwise try to
3573 get the result in TARGET, if convenient. If ENDP is 0 return the
3574 destination pointer, if ENDP is 1 return the end pointer ala
3575 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3579 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3585 const struct insn_data
* data
;
3590 dest_mem
= get_memory_rtx (dest
, NULL
);
3591 src_mem
= get_memory_rtx (src
, NULL
);
3594 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3595 dest_mem
= replace_equiv_address (dest_mem
, target
);
3596 end
= gen_reg_rtx (Pmode
);
3600 if (target
== 0 || target
== const0_rtx
)
3602 end
= gen_reg_rtx (Pmode
);
3610 data
= insn_data
+ CODE_FOR_movstr
;
3612 if (data
->operand
[0].mode
!= VOIDmode
)
3613 end
= gen_lowpart (data
->operand
[0].mode
, end
);
3615 insn
= data
->genfun (end
, dest_mem
, src_mem
);
3621 /* movstr is supposed to set end to the address of the NUL
3622 terminator. If the caller requested a mempcpy-like return value,
3624 if (endp
== 1 && target
!= const0_rtx
)
3626 rtx tem
= plus_constant (gen_lowpart (GET_MODE (target
), end
), 1);
3627 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3633 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3634 NULL_RTX if we failed the caller should emit a normal call, otherwise
3635 try to get the result in TARGET, if convenient (and in mode MODE if that's
3639 expand_builtin_strcpy (tree fndecl
, tree exp
, rtx target
, enum machine_mode mode
)
3641 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3643 tree dest
= CALL_EXPR_ARG (exp
, 0);
3644 tree src
= CALL_EXPR_ARG (exp
, 1);
3645 return expand_builtin_strcpy_args (fndecl
, dest
, src
, target
, mode
);
3650 /* Helper function to do the actual work for expand_builtin_strcpy. The
3651 arguments to the builtin_strcpy call DEST and SRC are broken out
3652 so that this can also be called without constructing an actual CALL_EXPR.
3653 The other arguments and return value are the same as for
3654 expand_builtin_strcpy. */
3657 expand_builtin_strcpy_args (tree fndecl
, tree dest
, tree src
,
3658 rtx target
, enum machine_mode mode
)
3660 tree result
= fold_builtin_strcpy (fndecl
, dest
, src
, 0);
3662 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3663 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3667 /* Expand a call EXP to the stpcpy builtin.
3668 Return NULL_RTX if we failed the caller should emit a normal call,
3669 otherwise try to get the result in TARGET, if convenient (and in
3670 mode MODE if that's convenient). */
3673 expand_builtin_stpcpy (tree exp
, rtx target
, enum machine_mode mode
)
3677 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3680 dst
= CALL_EXPR_ARG (exp
, 0);
3681 src
= CALL_EXPR_ARG (exp
, 1);
3683 /* If return value is ignored, transform stpcpy into strcpy. */
3684 if (target
== const0_rtx
)
3686 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
3690 return expand_expr (build_call_expr (fn
, 2, dst
, src
),
3691 target
, mode
, EXPAND_NORMAL
);
3698 /* Ensure we get an actual string whose length can be evaluated at
3699 compile-time, not an expression containing a string. This is
3700 because the latter will potentially produce pessimized code
3701 when used to produce the return value. */
3702 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3703 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3705 lenp1
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
3706 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
, TREE_TYPE (exp
),
3707 target
, mode
, /*endp=*/2);
3712 if (TREE_CODE (len
) == INTEGER_CST
)
3714 rtx len_rtx
= expand_normal (len
);
3716 if (GET_CODE (len_rtx
) == CONST_INT
)
3718 ret
= expand_builtin_strcpy_args (get_callee_fndecl (exp
),
3719 dst
, src
, target
, mode
);
3725 if (mode
!= VOIDmode
)
3726 target
= gen_reg_rtx (mode
);
3728 target
= gen_reg_rtx (GET_MODE (ret
));
3730 if (GET_MODE (target
) != GET_MODE (ret
))
3731 ret
= gen_lowpart (GET_MODE (target
), ret
);
3733 ret
= plus_constant (ret
, INTVAL (len_rtx
));
3734 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3742 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3746 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3747 bytes from constant string DATA + OFFSET and return it as target
3751 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3752 enum machine_mode mode
)
3754 const char *str
= (const char *) data
;
3756 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3759 return c_readstr (str
+ offset
, mode
);
3762 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3763 NULL_RTX if we failed the caller should emit a normal call. */
3766 expand_builtin_strncpy (tree exp
, rtx target
, enum machine_mode mode
)
3768 tree fndecl
= get_callee_fndecl (exp
);
3770 if (validate_arglist (exp
,
3771 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3773 tree dest
= CALL_EXPR_ARG (exp
, 0);
3774 tree src
= CALL_EXPR_ARG (exp
, 1);
3775 tree len
= CALL_EXPR_ARG (exp
, 2);
3776 tree slen
= c_strlen (src
, 1);
3777 tree result
= fold_builtin_strncpy (fndecl
, dest
, src
, len
, slen
);
3781 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3783 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3785 result
= TREE_OPERAND (result
, 1);
3787 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3790 /* We must be passed a constant len and src parameter. */
3791 if (!host_integerp (len
, 1) || !slen
|| !host_integerp (slen
, 1))
3794 slen
= size_binop (PLUS_EXPR
, slen
, ssize_int (1));
3796 /* We're required to pad with trailing zeros if the requested
3797 len is greater than strlen(s2)+1. In that case try to
3798 use store_by_pieces, if it fails, punt. */
3799 if (tree_int_cst_lt (slen
, len
))
3801 unsigned int dest_align
3802 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3803 const char *p
= c_getstr (src
);
3806 if (!p
|| dest_align
== 0 || !host_integerp (len
, 1)
3807 || !can_store_by_pieces (tree_low_cst (len
, 1),
3808 builtin_strncpy_read_str
,
3809 CONST_CAST (char *, p
),
3813 dest_mem
= get_memory_rtx (dest
, len
);
3814 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3815 builtin_strncpy_read_str
,
3816 CONST_CAST (char *, p
), dest_align
, false, 0);
3817 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3818 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3825 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3826 bytes from constant string DATA + OFFSET and return it as target
3830 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3831 enum machine_mode mode
)
3833 const char *c
= (const char *) data
;
3834 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
3836 memset (p
, *c
, GET_MODE_SIZE (mode
));
3838 return c_readstr (p
, mode
);
3841 /* Callback routine for store_by_pieces. Return the RTL of a register
3842 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3843 char value given in the RTL register data. For example, if mode is
3844 4 bytes wide, return the RTL for 0x01010101*data. */
3847 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3848 enum machine_mode mode
)
3854 size
= GET_MODE_SIZE (mode
);
3858 p
= XALLOCAVEC (char, size
);
3859 memset (p
, 1, size
);
3860 coeff
= c_readstr (p
, mode
);
3862 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3863 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3864 return force_reg (mode
, target
);
3867 /* Expand expression EXP, which is a call to the memset builtin. Return
3868 NULL_RTX if we failed the caller should emit a normal call, otherwise
3869 try to get the result in TARGET, if convenient (and in mode MODE if that's
3873 expand_builtin_memset (tree exp
, rtx target
, enum machine_mode mode
)
3875 if (!validate_arglist (exp
,
3876 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3880 tree dest
= CALL_EXPR_ARG (exp
, 0);
3881 tree val
= CALL_EXPR_ARG (exp
, 1);
3882 tree len
= CALL_EXPR_ARG (exp
, 2);
3883 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3887 /* Helper function to do the actual work for expand_builtin_memset. The
3888 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3889 so that this can also be called without constructing an actual CALL_EXPR.
3890 The other arguments and return value are the same as for
3891 expand_builtin_memset. */
3894 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3895 rtx target
, enum machine_mode mode
, tree orig_exp
)
3898 enum built_in_function fcode
;
3900 unsigned int dest_align
;
3901 rtx dest_mem
, dest_addr
, len_rtx
;
3902 HOST_WIDE_INT expected_size
= -1;
3903 unsigned int expected_align
= 0;
3904 tree_ann_common_t ann
;
3906 dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3908 /* If DEST is not a pointer type, don't do this operation in-line. */
3909 if (dest_align
== 0)
3912 ann
= tree_common_ann (orig_exp
);
3914 stringop_block_profile (ann
->stmt
, &expected_align
, &expected_size
);
3916 if (expected_align
< dest_align
)
3917 expected_align
= dest_align
;
3919 /* If the LEN parameter is zero, return DEST. */
3920 if (integer_zerop (len
))
3922 /* Evaluate and ignore VAL in case it has side-effects. */
3923 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3924 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3927 /* Stabilize the arguments in case we fail. */
3928 dest
= builtin_save_expr (dest
);
3929 val
= builtin_save_expr (val
);
3930 len
= builtin_save_expr (len
);
3932 len_rtx
= expand_normal (len
);
3933 dest_mem
= get_memory_rtx (dest
, len
);
3935 if (TREE_CODE (val
) != INTEGER_CST
)
3939 val_rtx
= expand_normal (val
);
3940 val_rtx
= convert_to_mode (TYPE_MODE (unsigned_char_type_node
),
3943 /* Assume that we can memset by pieces if we can store
3944 * the coefficients by pieces (in the required modes).
3945 * We can't pass builtin_memset_gen_str as that emits RTL. */
3947 if (host_integerp (len
, 1)
3948 && can_store_by_pieces (tree_low_cst (len
, 1),
3949 builtin_memset_read_str
, &c
, dest_align
,
3952 val_rtx
= force_reg (TYPE_MODE (unsigned_char_type_node
),
3954 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3955 builtin_memset_gen_str
, val_rtx
, dest_align
,
3958 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3959 dest_align
, expected_align
,
3963 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3964 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3968 if (target_char_cast (val
, &c
))
3973 if (host_integerp (len
, 1)
3974 && can_store_by_pieces (tree_low_cst (len
, 1),
3975 builtin_memset_read_str
, &c
, dest_align
,
3977 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3978 builtin_memset_read_str
, &c
, dest_align
, true, 0);
3979 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, GEN_INT (c
),
3980 dest_align
, expected_align
,
3984 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3985 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3989 set_mem_align (dest_mem
, dest_align
);
3990 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
3991 CALL_EXPR_TAILCALL (orig_exp
)
3992 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3993 expected_align
, expected_size
);
3997 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3998 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
4004 fndecl
= get_callee_fndecl (orig_exp
);
4005 fcode
= DECL_FUNCTION_CODE (fndecl
);
4006 if (fcode
== BUILT_IN_MEMSET
)
4007 fn
= build_call_expr (fndecl
, 3, dest
, val
, len
);
4008 else if (fcode
== BUILT_IN_BZERO
)
4009 fn
= build_call_expr (fndecl
, 2, dest
, len
);
4012 if (TREE_CODE (fn
) == CALL_EXPR
)
4013 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
4014 return expand_call (fn
, target
, target
== const0_rtx
);
4017 /* Expand expression EXP, which is a call to the bzero builtin. Return
4018 NULL_RTX if we failed the caller should emit a normal call. */
4021 expand_builtin_bzero (tree exp
)
4025 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4028 dest
= CALL_EXPR_ARG (exp
, 0);
4029 size
= CALL_EXPR_ARG (exp
, 1);
4031 /* New argument list transforming bzero(ptr x, int y) to
4032 memset(ptr x, int 0, size_t y). This is done this way
4033 so that if it isn't expanded inline, we fallback to
4034 calling bzero instead of memset. */
4036 return expand_builtin_memset_args (dest
, integer_zero_node
,
4037 fold_convert (sizetype
, size
),
4038 const0_rtx
, VOIDmode
, exp
);
4041 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4042 caller should emit a normal call, otherwise try to get the result
4043 in TARGET, if convenient (and in mode MODE if that's convenient). */
4046 expand_builtin_memchr (tree exp
, rtx target
, enum machine_mode mode
)
4048 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
,
4049 INTEGER_TYPE
, VOID_TYPE
))
4051 tree type
= TREE_TYPE (exp
);
4052 tree result
= fold_builtin_memchr (CALL_EXPR_ARG (exp
, 0),
4053 CALL_EXPR_ARG (exp
, 1),
4054 CALL_EXPR_ARG (exp
, 2), type
);
4056 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4061 /* Expand expression EXP, which is a call to the memcmp built-in function.
4062 Return NULL_RTX if we failed and the
4063 caller should emit a normal call, otherwise try to get the result in
4064 TARGET, if convenient (and in mode MODE, if that's convenient). */
4067 expand_builtin_memcmp (tree exp
, rtx target
, enum machine_mode mode
)
4069 if (!validate_arglist (exp
,
4070 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4074 tree result
= fold_builtin_memcmp (CALL_EXPR_ARG (exp
, 0),
4075 CALL_EXPR_ARG (exp
, 1),
4076 CALL_EXPR_ARG (exp
, 2));
4078 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4081 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4083 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4086 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4087 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4088 tree len
= CALL_EXPR_ARG (exp
, 2);
4091 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4093 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4094 enum machine_mode insn_mode
;
4096 #ifdef HAVE_cmpmemsi
4098 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
4101 #ifdef HAVE_cmpstrnsi
4103 insn_mode
= insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4108 /* If we don't have POINTER_TYPE, call the function. */
4109 if (arg1_align
== 0 || arg2_align
== 0)
4112 /* Make a place to write the result of the instruction. */
4115 && REG_P (result
) && GET_MODE (result
) == insn_mode
4116 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4117 result
= gen_reg_rtx (insn_mode
);
4119 arg1_rtx
= get_memory_rtx (arg1
, len
);
4120 arg2_rtx
= get_memory_rtx (arg2
, len
);
4121 arg3_rtx
= expand_normal (len
);
4123 /* Set MEM_SIZE as appropriate. */
4124 if (GET_CODE (arg3_rtx
) == CONST_INT
)
4126 set_mem_size (arg1_rtx
, arg3_rtx
);
4127 set_mem_size (arg2_rtx
, arg3_rtx
);
4130 #ifdef HAVE_cmpmemsi
4132 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4133 GEN_INT (MIN (arg1_align
, arg2_align
)));
4136 #ifdef HAVE_cmpstrnsi
4138 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4139 GEN_INT (MIN (arg1_align
, arg2_align
)));
4147 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE
,
4148 TYPE_MODE (integer_type_node
), 3,
4149 XEXP (arg1_rtx
, 0), Pmode
,
4150 XEXP (arg2_rtx
, 0), Pmode
,
4151 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
4152 TYPE_UNSIGNED (sizetype
)),
4153 TYPE_MODE (sizetype
));
4155 /* Return the value in the proper mode for this function. */
4156 mode
= TYPE_MODE (TREE_TYPE (exp
));
4157 if (GET_MODE (result
) == mode
)
4159 else if (target
!= 0)
4161 convert_move (target
, result
, 0);
4165 return convert_to_mode (mode
, result
, 0);
4172 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4173 if we failed the caller should emit a normal call, otherwise try to get
4174 the result in TARGET, if convenient. */
4177 expand_builtin_strcmp (tree exp
, rtx target
, enum machine_mode mode
)
4179 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4183 tree result
= fold_builtin_strcmp (CALL_EXPR_ARG (exp
, 0),
4184 CALL_EXPR_ARG (exp
, 1));
4186 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4189 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4190 if (cmpstr_optab
[SImode
] != CODE_FOR_nothing
4191 || cmpstrn_optab
[SImode
] != CODE_FOR_nothing
)
4193 rtx arg1_rtx
, arg2_rtx
;
4194 rtx result
, insn
= NULL_RTX
;
4196 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4197 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4200 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4202 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4204 /* If we don't have POINTER_TYPE, call the function. */
4205 if (arg1_align
== 0 || arg2_align
== 0)
4208 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4209 arg1
= builtin_save_expr (arg1
);
4210 arg2
= builtin_save_expr (arg2
);
4212 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4213 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4215 #ifdef HAVE_cmpstrsi
4216 /* Try to call cmpstrsi. */
4219 enum machine_mode insn_mode
4220 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
4222 /* Make a place to write the result of the instruction. */
4225 && REG_P (result
) && GET_MODE (result
) == insn_mode
4226 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4227 result
= gen_reg_rtx (insn_mode
);
4229 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
,
4230 GEN_INT (MIN (arg1_align
, arg2_align
)));
4233 #ifdef HAVE_cmpstrnsi
4234 /* Try to determine at least one length and call cmpstrnsi. */
4235 if (!insn
&& HAVE_cmpstrnsi
)
4240 enum machine_mode insn_mode
4241 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4242 tree len1
= c_strlen (arg1
, 1);
4243 tree len2
= c_strlen (arg2
, 1);
4246 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4248 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4250 /* If we don't have a constant length for the first, use the length
4251 of the second, if we know it. We don't require a constant for
4252 this case; some cost analysis could be done if both are available
4253 but neither is constant. For now, assume they're equally cheap,
4254 unless one has side effects. If both strings have constant lengths,
4261 else if (TREE_SIDE_EFFECTS (len1
))
4263 else if (TREE_SIDE_EFFECTS (len2
))
4265 else if (TREE_CODE (len1
) != INTEGER_CST
)
4267 else if (TREE_CODE (len2
) != INTEGER_CST
)
4269 else if (tree_int_cst_lt (len1
, len2
))
4274 /* If both arguments have side effects, we cannot optimize. */
4275 if (!len
|| TREE_SIDE_EFFECTS (len
))
4278 arg3_rtx
= expand_normal (len
);
4280 /* Make a place to write the result of the instruction. */
4283 && REG_P (result
) && GET_MODE (result
) == insn_mode
4284 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4285 result
= gen_reg_rtx (insn_mode
);
4287 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4288 GEN_INT (MIN (arg1_align
, arg2_align
)));
4296 /* Return the value in the proper mode for this function. */
4297 mode
= TYPE_MODE (TREE_TYPE (exp
));
4298 if (GET_MODE (result
) == mode
)
4301 return convert_to_mode (mode
, result
, 0);
4302 convert_move (target
, result
, 0);
4306 /* Expand the library call ourselves using a stabilized argument
4307 list to avoid re-evaluating the function's arguments twice. */
4308 #ifdef HAVE_cmpstrnsi
4311 fndecl
= get_callee_fndecl (exp
);
4312 fn
= build_call_expr (fndecl
, 2, arg1
, arg2
);
4313 if (TREE_CODE (fn
) == CALL_EXPR
)
4314 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4315 return expand_call (fn
, target
, target
== const0_rtx
);
4321 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4322 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4323 the result in TARGET, if convenient. */
4326 expand_builtin_strncmp (tree exp
, rtx target
, enum machine_mode mode
)
4328 if (!validate_arglist (exp
,
4329 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4333 tree result
= fold_builtin_strncmp (CALL_EXPR_ARG (exp
, 0),
4334 CALL_EXPR_ARG (exp
, 1),
4335 CALL_EXPR_ARG (exp
, 2));
4337 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4340 /* If c_strlen can determine an expression for one of the string
4341 lengths, and it doesn't have side effects, then emit cmpstrnsi
4342 using length MIN(strlen(string)+1, arg3). */
4343 #ifdef HAVE_cmpstrnsi
4346 tree len
, len1
, len2
;
4347 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4350 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4351 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4352 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4355 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4357 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4358 enum machine_mode insn_mode
4359 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4361 len1
= c_strlen (arg1
, 1);
4362 len2
= c_strlen (arg2
, 1);
4365 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4367 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4369 /* If we don't have a constant length for the first, use the length
4370 of the second, if we know it. We don't require a constant for
4371 this case; some cost analysis could be done if both are available
4372 but neither is constant. For now, assume they're equally cheap,
4373 unless one has side effects. If both strings have constant lengths,
4380 else if (TREE_SIDE_EFFECTS (len1
))
4382 else if (TREE_SIDE_EFFECTS (len2
))
4384 else if (TREE_CODE (len1
) != INTEGER_CST
)
4386 else if (TREE_CODE (len2
) != INTEGER_CST
)
4388 else if (tree_int_cst_lt (len1
, len2
))
4393 /* If both arguments have side effects, we cannot optimize. */
4394 if (!len
|| TREE_SIDE_EFFECTS (len
))
4397 /* The actual new length parameter is MIN(len,arg3). */
4398 len
= fold_build2 (MIN_EXPR
, TREE_TYPE (len
), len
,
4399 fold_convert (TREE_TYPE (len
), arg3
));
4401 /* If we don't have POINTER_TYPE, call the function. */
4402 if (arg1_align
== 0 || arg2_align
== 0)
4405 /* Make a place to write the result of the instruction. */
4408 && REG_P (result
) && GET_MODE (result
) == insn_mode
4409 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4410 result
= gen_reg_rtx (insn_mode
);
4412 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4413 arg1
= builtin_save_expr (arg1
);
4414 arg2
= builtin_save_expr (arg2
);
4415 len
= builtin_save_expr (len
);
4417 arg1_rtx
= get_memory_rtx (arg1
, len
);
4418 arg2_rtx
= get_memory_rtx (arg2
, len
);
4419 arg3_rtx
= expand_normal (len
);
4420 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4421 GEN_INT (MIN (arg1_align
, arg2_align
)));
4426 /* Return the value in the proper mode for this function. */
4427 mode
= TYPE_MODE (TREE_TYPE (exp
));
4428 if (GET_MODE (result
) == mode
)
4431 return convert_to_mode (mode
, result
, 0);
4432 convert_move (target
, result
, 0);
4436 /* Expand the library call ourselves using a stabilized argument
4437 list to avoid re-evaluating the function's arguments twice. */
4438 fndecl
= get_callee_fndecl (exp
);
4439 fn
= build_call_expr (fndecl
, 3, arg1
, arg2
, len
);
4440 if (TREE_CODE (fn
) == CALL_EXPR
)
4441 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4442 return expand_call (fn
, target
, target
== const0_rtx
);
4448 /* Expand expression EXP, which is a call to the strcat builtin.
4449 Return NULL_RTX if we failed the caller should emit a normal call,
4450 otherwise try to get the result in TARGET, if convenient. */
4453 expand_builtin_strcat (tree fndecl
, tree exp
, rtx target
, enum machine_mode mode
)
4455 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4459 tree dst
= CALL_EXPR_ARG (exp
, 0);
4460 tree src
= CALL_EXPR_ARG (exp
, 1);
4461 const char *p
= c_getstr (src
);
4463 /* If the string length is zero, return the dst parameter. */
4464 if (p
&& *p
== '\0')
4465 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
4469 /* See if we can store by pieces into (dst + strlen(dst)). */
4470 tree newsrc
, newdst
,
4471 strlen_fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
4474 /* Stabilize the argument list. */
4475 newsrc
= builtin_save_expr (src
);
4476 dst
= builtin_save_expr (dst
);
4480 /* Create strlen (dst). */
4481 newdst
= build_call_expr (strlen_fn
, 1, dst
);
4482 /* Create (dst p+ strlen (dst)). */
4484 newdst
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dst
), dst
, newdst
);
4485 newdst
= builtin_save_expr (newdst
);
4487 if (!expand_builtin_strcpy_args (fndecl
, newdst
, newsrc
, target
, mode
))
4489 end_sequence (); /* Stop sequence. */
4493 /* Output the entire sequence. */
4494 insns
= get_insns ();
4498 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
4505 /* Expand expression EXP, which is a call to the strncat builtin.
4506 Return NULL_RTX if we failed the caller should emit a normal call,
4507 otherwise try to get the result in TARGET, if convenient. */
4510 expand_builtin_strncat (tree exp
, rtx target
, enum machine_mode mode
)
4512 if (validate_arglist (exp
,
4513 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4515 tree result
= fold_builtin_strncat (CALL_EXPR_ARG (exp
, 0),
4516 CALL_EXPR_ARG (exp
, 1),
4517 CALL_EXPR_ARG (exp
, 2));
4519 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4524 /* Expand expression EXP, which is a call to the strspn builtin.
4525 Return NULL_RTX if we failed the caller should emit a normal call,
4526 otherwise try to get the result in TARGET, if convenient. */
4529 expand_builtin_strspn (tree exp
, rtx target
, enum machine_mode mode
)
4531 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4533 tree result
= fold_builtin_strspn (CALL_EXPR_ARG (exp
, 0),
4534 CALL_EXPR_ARG (exp
, 1));
4536 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4541 /* Expand expression EXP, which is a call to the strcspn builtin.
4542 Return NULL_RTX if we failed the caller should emit a normal call,
4543 otherwise try to get the result in TARGET, if convenient. */
4546 expand_builtin_strcspn (tree exp
, rtx target
, enum machine_mode mode
)
4548 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4550 tree result
= fold_builtin_strcspn (CALL_EXPR_ARG (exp
, 0),
4551 CALL_EXPR_ARG (exp
, 1));
4553 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4558 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4559 if that's convenient. */
4562 expand_builtin_saveregs (void)
4566 /* Don't do __builtin_saveregs more than once in a function.
4567 Save the result of the first call and reuse it. */
4568 if (saveregs_value
!= 0)
4569 return saveregs_value
;
4571 /* When this function is called, it means that registers must be
4572 saved on entry to this function. So we migrate the call to the
4573 first insn of this function. */
4577 /* Do whatever the machine needs done in this case. */
4578 val
= targetm
.calls
.expand_builtin_saveregs ();
4583 saveregs_value
= val
;
4585 /* Put the insns after the NOTE that starts the function. If this
4586 is inside a start_sequence, make the outer-level insn chain current, so
4587 the code is placed at the start of the function. */
4588 push_topmost_sequence ();
4589 emit_insn_after (seq
, entry_of_function ());
4590 pop_topmost_sequence ();
4595 /* __builtin_args_info (N) returns word N of the arg space info
4596 for the current function. The number and meanings of words
4597 is controlled by the definition of CUMULATIVE_ARGS. */
4600 expand_builtin_args_info (tree exp
)
4602 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
4603 int *word_ptr
= (int *) &crtl
->args
.info
;
4605 gcc_assert (sizeof (CUMULATIVE_ARGS
) % sizeof (int) == 0);
4607 if (call_expr_nargs (exp
) != 0)
4609 if (!host_integerp (CALL_EXPR_ARG (exp
, 0), 0))
4610 error ("argument of %<__builtin_args_info%> must be constant");
4613 HOST_WIDE_INT wordnum
= tree_low_cst (CALL_EXPR_ARG (exp
, 0), 0);
4615 if (wordnum
< 0 || wordnum
>= nwords
)
4616 error ("argument of %<__builtin_args_info%> out of range");
4618 return GEN_INT (word_ptr
[wordnum
]);
4622 error ("missing argument in %<__builtin_args_info%>");
4627 /* Expand a call to __builtin_next_arg. */
4630 expand_builtin_next_arg (void)
4632 /* Checking arguments is already done in fold_builtin_next_arg
4633 that must be called before this function. */
4634 return expand_binop (ptr_mode
, add_optab
,
4635 crtl
->args
.internal_arg_pointer
,
4636 crtl
->args
.arg_offset_rtx
,
4637 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4640 /* Make it easier for the backends by protecting the valist argument
4641 from multiple evaluations. */
4644 stabilize_va_list (tree valist
, int needs_lvalue
)
4646 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4648 gcc_assert (vatype
!= NULL_TREE
);
4650 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4652 if (TREE_SIDE_EFFECTS (valist
))
4653 valist
= save_expr (valist
);
4655 /* For this case, the backends will be expecting a pointer to
4656 vatype, but it's possible we've actually been given an array
4657 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4659 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4661 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4662 valist
= build_fold_addr_expr_with_type (valist
, p1
);
4671 if (! TREE_SIDE_EFFECTS (valist
))
4674 pt
= build_pointer_type (vatype
);
4675 valist
= fold_build1 (ADDR_EXPR
, pt
, valist
);
4676 TREE_SIDE_EFFECTS (valist
) = 1;
4679 if (TREE_SIDE_EFFECTS (valist
))
4680 valist
= save_expr (valist
);
4681 valist
= build_fold_indirect_ref (valist
);
4687 /* The "standard" definition of va_list is void*. */
4690 std_build_builtin_va_list (void)
4692 return ptr_type_node
;
4695 /* The "standard" abi va_list is va_list_type_node. */
4698 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
4700 return va_list_type_node
;
4703 /* The "standard" type of va_list is va_list_type_node. */
4706 std_canonical_va_list_type (tree type
)
4710 if (INDIRECT_REF_P (type
))
4711 type
= TREE_TYPE (type
);
4712 else if (POINTER_TYPE_P (type
) && POINTER_TYPE_P (TREE_TYPE(type
)))
4713 type
= TREE_TYPE (type
);
4714 wtype
= va_list_type_node
;
4716 /* Treat structure va_list types. */
4717 if (TREE_CODE (wtype
) == RECORD_TYPE
&& POINTER_TYPE_P (htype
))
4718 htype
= TREE_TYPE (htype
);
4719 else if (TREE_CODE (wtype
) == ARRAY_TYPE
)
4721 /* If va_list is an array type, the argument may have decayed
4722 to a pointer type, e.g. by being passed to another function.
4723 In that case, unwrap both types so that we can compare the
4724 underlying records. */
4725 if (TREE_CODE (htype
) == ARRAY_TYPE
4726 || POINTER_TYPE_P (htype
))
4728 wtype
= TREE_TYPE (wtype
);
4729 htype
= TREE_TYPE (htype
);
4732 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
4733 return va_list_type_node
;
4738 /* The "standard" implementation of va_start: just assign `nextarg' to
4742 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4744 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4745 convert_move (va_r
, nextarg
, 0);
4748 /* Expand EXP, a call to __builtin_va_start. */
4751 expand_builtin_va_start (tree exp
)
4756 if (call_expr_nargs (exp
) < 2)
4758 error ("too few arguments to function %<va_start%>");
4762 if (fold_builtin_next_arg (exp
, true))
4765 nextarg
= expand_builtin_next_arg ();
4766 valist
= stabilize_va_list (CALL_EXPR_ARG (exp
, 0), 1);
4768 if (targetm
.expand_builtin_va_start
)
4769 targetm
.expand_builtin_va_start (valist
, nextarg
);
4771 std_expand_builtin_va_start (valist
, nextarg
);
4776 /* The "standard" implementation of va_arg: read the value from the
4777 current (padded) address and increment by the (padded) size. */
4780 std_gimplify_va_arg_expr (tree valist
, tree type
, gimple_seq
*pre_p
,
4783 tree addr
, t
, type_size
, rounded_size
, valist_tmp
;
4784 unsigned HOST_WIDE_INT align
, boundary
;
4787 #ifdef ARGS_GROW_DOWNWARD
4788 /* All of the alignment and movement below is for args-grow-up machines.
4789 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4790 implement their own specialized gimplify_va_arg_expr routines. */
4794 indirect
= pass_by_reference (NULL
, TYPE_MODE (type
), type
, false);
4796 type
= build_pointer_type (type
);
4798 align
= PARM_BOUNDARY
/ BITS_PER_UNIT
;
4799 boundary
= FUNCTION_ARG_BOUNDARY (TYPE_MODE (type
), type
);
4801 /* When we align parameter on stack for caller, if the parameter
4802 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4803 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4804 here with caller. */
4805 if (boundary
> MAX_SUPPORTED_STACK_ALIGNMENT
)
4806 boundary
= MAX_SUPPORTED_STACK_ALIGNMENT
;
4808 boundary
/= BITS_PER_UNIT
;
4810 /* Hoist the valist value into a temporary for the moment. */
4811 valist_tmp
= get_initialized_tmp_var (valist
, pre_p
, NULL
);
4813 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4814 requires greater alignment, we must perform dynamic alignment. */
4815 if (boundary
> align
4816 && !integer_zerop (TYPE_SIZE (type
)))
4818 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4819 fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (valist
),
4820 valist_tmp
, size_int (boundary
- 1)));
4821 gimplify_and_add (t
, pre_p
);
4823 t
= fold_convert (sizetype
, valist_tmp
);
4824 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4825 fold_convert (TREE_TYPE (valist
),
4826 fold_build2 (BIT_AND_EXPR
, sizetype
, t
,
4827 size_int (-boundary
))));
4828 gimplify_and_add (t
, pre_p
);
4833 /* If the actual alignment is less than the alignment of the type,
4834 adjust the type accordingly so that we don't assume strict alignment
4835 when dereferencing the pointer. */
4836 boundary
*= BITS_PER_UNIT
;
4837 if (boundary
< TYPE_ALIGN (type
))
4839 type
= build_variant_type_copy (type
);
4840 TYPE_ALIGN (type
) = boundary
;
4843 /* Compute the rounded size of the type. */
4844 type_size
= size_in_bytes (type
);
4845 rounded_size
= round_up (type_size
, align
);
4847 /* Reduce rounded_size so it's sharable with the postqueue. */
4848 gimplify_expr (&rounded_size
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4852 if (PAD_VARARGS_DOWN
&& !integer_zerop (rounded_size
))
4854 /* Small args are padded downward. */
4855 t
= fold_build2 (GT_EXPR
, sizetype
, rounded_size
, size_int (align
));
4856 t
= fold_build3 (COND_EXPR
, sizetype
, t
, size_zero_node
,
4857 size_binop (MINUS_EXPR
, rounded_size
, type_size
));
4858 addr
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (addr
), addr
, t
);
4861 /* Compute new value for AP. */
4862 t
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (valist
), valist_tmp
, rounded_size
);
4863 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
, t
);
4864 gimplify_and_add (t
, pre_p
);
4866 addr
= fold_convert (build_pointer_type (type
), addr
);
4869 addr
= build_va_arg_indirect_ref (addr
);
4871 return build_va_arg_indirect_ref (addr
);
4874 /* Build an indirect-ref expression over the given TREE, which represents a
4875 piece of a va_arg() expansion. */
4877 build_va_arg_indirect_ref (tree addr
)
4879 addr
= build_fold_indirect_ref (addr
);
4881 if (flag_mudflap
) /* Don't instrument va_arg INDIRECT_REF. */
4887 /* Return a dummy expression of type TYPE in order to keep going after an
4891 dummy_object (tree type
)
4893 tree t
= build_int_cst (build_pointer_type (type
), 0);
4894 return build1 (INDIRECT_REF
, type
, t
);
4897 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4898 builtin function, but a very special sort of operator. */
4900 enum gimplify_status
4901 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
4903 tree promoted_type
, have_va_type
;
4904 tree valist
= TREE_OPERAND (*expr_p
, 0);
4905 tree type
= TREE_TYPE (*expr_p
);
4908 /* Verify that valist is of the proper type. */
4909 have_va_type
= TREE_TYPE (valist
);
4910 if (have_va_type
== error_mark_node
)
4912 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
4914 if (have_va_type
== NULL_TREE
)
4916 error ("first argument to %<va_arg%> not of type %<va_list%>");
4920 /* Generate a diagnostic for requesting data of a type that cannot
4921 be passed through `...' due to type promotion at the call site. */
4922 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
4925 static bool gave_help
;
4927 /* Unfortunately, this is merely undefined, rather than a constraint
4928 violation, so we cannot make this an error. If this call is never
4929 executed, the program is still strictly conforming. */
4930 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4931 type
, promoted_type
);
4935 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4936 promoted_type
, type
);
4939 /* We can, however, treat "undefined" any way we please.
4940 Call abort to encourage the user to fix the program. */
4941 inform ("if this code is reached, the program will abort");
4942 t
= build_call_expr (implicit_built_in_decls
[BUILT_IN_TRAP
], 0);
4943 gimplify_and_add (t
, pre_p
);
4945 /* This is dead code, but go ahead and finish so that the
4946 mode of the result comes out right. */
4947 *expr_p
= dummy_object (type
);
4952 /* Make it easier for the backends by protecting the valist argument
4953 from multiple evaluations. */
4954 if (TREE_CODE (have_va_type
) == ARRAY_TYPE
)
4956 /* For this case, the backends will be expecting a pointer to
4957 TREE_TYPE (abi), but it's possible we've
4958 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4960 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4962 tree p1
= build_pointer_type (TREE_TYPE (have_va_type
));
4963 valist
= build_fold_addr_expr_with_type (valist
, p1
);
4966 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4969 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_min_lval
, fb_lvalue
);
4971 if (!targetm
.gimplify_va_arg_expr
)
4972 /* FIXME: Once most targets are converted we should merely
4973 assert this is non-null. */
4976 *expr_p
= targetm
.gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
4981 /* Expand EXP, a call to __builtin_va_end. */
4984 expand_builtin_va_end (tree exp
)
4986 tree valist
= CALL_EXPR_ARG (exp
, 0);
4988 /* Evaluate for side effects, if needed. I hate macros that don't
4990 if (TREE_SIDE_EFFECTS (valist
))
4991 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4996 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4997 builtin rather than just as an assignment in stdarg.h because of the
4998 nastiness of array-type va_list types. */
5001 expand_builtin_va_copy (tree exp
)
5005 dst
= CALL_EXPR_ARG (exp
, 0);
5006 src
= CALL_EXPR_ARG (exp
, 1);
5008 dst
= stabilize_va_list (dst
, 1);
5009 src
= stabilize_va_list (src
, 0);
5011 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
5013 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
5015 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
5016 TREE_SIDE_EFFECTS (t
) = 1;
5017 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5021 rtx dstb
, srcb
, size
;
5023 /* Evaluate to pointers. */
5024 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5025 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5026 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
5027 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
5029 dstb
= convert_memory_address (Pmode
, dstb
);
5030 srcb
= convert_memory_address (Pmode
, srcb
);
5032 /* "Dereference" to BLKmode memories. */
5033 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
5034 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
5035 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
5036 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
5037 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
5038 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
5041 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
5047 /* Expand a call to one of the builtin functions __builtin_frame_address or
5048 __builtin_return_address. */
5051 expand_builtin_frame_address (tree fndecl
, tree exp
)
5053 /* The argument must be a nonnegative integer constant.
5054 It counts the number of frames to scan up the stack.
5055 The value is the return address saved in that frame. */
5056 if (call_expr_nargs (exp
) == 0)
5057 /* Warning about missing arg was already issued. */
5059 else if (! host_integerp (CALL_EXPR_ARG (exp
, 0), 1))
5061 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5062 error ("invalid argument to %<__builtin_frame_address%>");
5064 error ("invalid argument to %<__builtin_return_address%>");
5070 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
5071 tree_low_cst (CALL_EXPR_ARG (exp
, 0), 1));
5073 /* Some ports cannot access arbitrary stack frames. */
5076 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5077 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5079 warning (0, "unsupported argument to %<__builtin_return_address%>");
5083 /* For __builtin_frame_address, return what we've got. */
5084 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5088 && ! CONSTANT_P (tem
))
5089 tem
= copy_to_mode_reg (Pmode
, tem
);
5094 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5095 we failed and the caller should emit a normal call, otherwise try to get
5096 the result in TARGET, if convenient. */
5099 expand_builtin_alloca (tree exp
, rtx target
)
5104 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5105 should always expand to function calls. These can be intercepted
5110 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5113 /* Compute the argument. */
5114 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5116 /* Allocate the desired space. */
5117 result
= allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
5118 result
= convert_memory_address (ptr_mode
, result
);
5123 /* Expand a call to a bswap builtin with argument ARG0. MODE
5124 is the mode to expand with. */
5127 expand_builtin_bswap (tree exp
, rtx target
, rtx subtarget
)
5129 enum machine_mode mode
;
5133 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5136 arg
= CALL_EXPR_ARG (exp
, 0);
5137 mode
= TYPE_MODE (TREE_TYPE (arg
));
5138 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5140 target
= expand_unop (mode
, bswap_optab
, op0
, target
, 1);
5142 gcc_assert (target
);
5144 return convert_to_mode (mode
, target
, 0);
5147 /* Expand a call to a unary builtin in EXP.
5148 Return NULL_RTX if a normal call should be emitted rather than expanding the
5149 function in-line. If convenient, the result should be placed in TARGET.
5150 SUBTARGET may be used as the target for computing one of EXP's operands. */
5153 expand_builtin_unop (enum machine_mode target_mode
, tree exp
, rtx target
,
5154 rtx subtarget
, optab op_optab
)
5158 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5161 /* Compute the argument. */
5162 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
5163 VOIDmode
, EXPAND_NORMAL
);
5164 /* Compute op, into TARGET if possible.
5165 Set TARGET to wherever the result comes back. */
5166 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
5167 op_optab
, op0
, target
, 1);
5168 gcc_assert (target
);
5170 return convert_to_mode (target_mode
, target
, 0);
5173 /* If the string passed to fputs is a constant and is one character
5174 long, we attempt to transform this call into __builtin_fputc(). */
5177 expand_builtin_fputs (tree exp
, rtx target
, bool unlocked
)
5179 /* Verify the arguments in the original call. */
5180 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5182 tree result
= fold_builtin_fputs (CALL_EXPR_ARG (exp
, 0),
5183 CALL_EXPR_ARG (exp
, 1),
5184 (target
== const0_rtx
),
5185 unlocked
, NULL_TREE
);
5187 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
5192 /* Expand a call to __builtin_expect. We just return our argument
5193 as the builtin_expect semantic should've been already executed by
5194 tree branch prediction pass. */
5197 expand_builtin_expect (tree exp
, rtx target
)
5201 if (call_expr_nargs (exp
) < 2)
5203 arg
= CALL_EXPR_ARG (exp
, 0);
5204 c
= CALL_EXPR_ARG (exp
, 1);
5206 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5207 /* When guessing was done, the hints should be already stripped away. */
5208 gcc_assert (!flag_guess_branch_prob
5209 || optimize
== 0 || errorcount
|| sorrycount
);
5214 expand_builtin_trap (void)
5218 emit_insn (gen_trap ());
5221 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
5225 /* Expand EXP, a call to fabs, fabsf or fabsl.
5226 Return NULL_RTX if a normal call should be emitted rather than expanding
5227 the function inline. If convenient, the result should be placed
5228 in TARGET. SUBTARGET may be used as the target for computing
5232 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
5234 enum machine_mode mode
;
5238 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5241 arg
= CALL_EXPR_ARG (exp
, 0);
5242 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
5243 mode
= TYPE_MODE (TREE_TYPE (arg
));
5244 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5245 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
5248 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5249 Return NULL is a normal call should be emitted rather than expanding the
5250 function inline. If convenient, the result should be placed in TARGET.
5251 SUBTARGET may be used as the target for computing the operand. */
5254 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
5259 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
5262 arg
= CALL_EXPR_ARG (exp
, 0);
5263 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5265 arg
= CALL_EXPR_ARG (exp
, 1);
5266 op1
= expand_normal (arg
);
5268 return expand_copysign (op0
, op1
, target
);
5271 /* Create a new constant string literal and return a char* pointer to it.
5272 The STRING_CST value is the LEN characters at STR. */
5274 build_string_literal (int len
, const char *str
)
5276 tree t
, elem
, index
, type
;
5278 t
= build_string (len
, str
);
5279 elem
= build_type_variant (char_type_node
, 1, 0);
5280 index
= build_index_type (size_int (len
- 1));
5281 type
= build_array_type (elem
, index
);
5282 TREE_TYPE (t
) = type
;
5283 TREE_CONSTANT (t
) = 1;
5284 TREE_READONLY (t
) = 1;
5285 TREE_STATIC (t
) = 1;
5287 type
= build_pointer_type (elem
);
5288 t
= build1 (ADDR_EXPR
, type
,
5289 build4 (ARRAY_REF
, elem
,
5290 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
5294 /* Expand EXP, a call to printf or printf_unlocked.
5295 Return NULL_RTX if a normal call should be emitted rather than transforming
5296 the function inline. If convenient, the result should be placed in
5297 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5300 expand_builtin_printf (tree exp
, rtx target
, enum machine_mode mode
,
5303 /* If we're using an unlocked function, assume the other unlocked
5304 functions exist explicitly. */
5305 tree
const fn_putchar
= unlocked
? built_in_decls
[BUILT_IN_PUTCHAR_UNLOCKED
]
5306 : implicit_built_in_decls
[BUILT_IN_PUTCHAR
];
5307 tree
const fn_puts
= unlocked
? built_in_decls
[BUILT_IN_PUTS_UNLOCKED
]
5308 : implicit_built_in_decls
[BUILT_IN_PUTS
];
5309 const char *fmt_str
;
5312 int nargs
= call_expr_nargs (exp
);
5314 /* If the return value is used, don't do the transformation. */
5315 if (target
!= const0_rtx
)
5318 /* Verify the required arguments in the original call. */
5321 fmt
= CALL_EXPR_ARG (exp
, 0);
5322 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5325 /* Check whether the format is a literal string constant. */
5326 fmt_str
= c_getstr (fmt
);
5327 if (fmt_str
== NULL
)
5330 if (!init_target_chars ())
5333 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5334 if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
5337 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp
, 1))))
5340 fn
= build_call_expr (fn_puts
, 1, CALL_EXPR_ARG (exp
, 1));
5342 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5343 else if (strcmp (fmt_str
, target_percent_c
) == 0)
5346 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1))) != INTEGER_TYPE
)
5349 fn
= build_call_expr (fn_putchar
, 1, CALL_EXPR_ARG (exp
, 1));
5353 /* We can't handle anything else with % args or %% ... yet. */
5354 if (strchr (fmt_str
, target_percent
))
5360 /* If the format specifier was "", printf does nothing. */
5361 if (fmt_str
[0] == '\0')
5363 /* If the format specifier has length of 1, call putchar. */
5364 if (fmt_str
[1] == '\0')
5366 /* Given printf("c"), (where c is any one character,)
5367 convert "c"[0] to an int and pass that to the replacement
5369 arg
= build_int_cst (NULL_TREE
, fmt_str
[0]);
5371 fn
= build_call_expr (fn_putchar
, 1, arg
);
5375 /* If the format specifier was "string\n", call puts("string"). */
5376 size_t len
= strlen (fmt_str
);
5377 if ((unsigned char)fmt_str
[len
- 1] == target_newline
)
5379 /* Create a NUL-terminated string that's one char shorter
5380 than the original, stripping off the trailing '\n'. */
5381 char *newstr
= XALLOCAVEC (char, len
);
5382 memcpy (newstr
, fmt_str
, len
- 1);
5383 newstr
[len
- 1] = 0;
5384 arg
= build_string_literal (len
, newstr
);
5386 fn
= build_call_expr (fn_puts
, 1, arg
);
5389 /* We'd like to arrange to call fputs(string,stdout) here,
5390 but we need stdout and don't have a way to get it yet. */
5397 if (TREE_CODE (fn
) == CALL_EXPR
)
5398 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5399 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
5402 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5403 Return NULL_RTX if a normal call should be emitted rather than transforming
5404 the function inline. If convenient, the result should be placed in
5405 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5408 expand_builtin_fprintf (tree exp
, rtx target
, enum machine_mode mode
,
5411 /* If we're using an unlocked function, assume the other unlocked
5412 functions exist explicitly. */
5413 tree
const fn_fputc
= unlocked
? built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
5414 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
5415 tree
const fn_fputs
= unlocked
? built_in_decls
[BUILT_IN_FPUTS_UNLOCKED
]
5416 : implicit_built_in_decls
[BUILT_IN_FPUTS
];
5417 const char *fmt_str
;
5420 int nargs
= call_expr_nargs (exp
);
5422 /* If the return value is used, don't do the transformation. */
5423 if (target
!= const0_rtx
)
5426 /* Verify the required arguments in the original call. */
5429 fp
= CALL_EXPR_ARG (exp
, 0);
5430 if (! POINTER_TYPE_P (TREE_TYPE (fp
)))
5432 fmt
= CALL_EXPR_ARG (exp
, 1);
5433 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5436 /* Check whether the format is a literal string constant. */
5437 fmt_str
= c_getstr (fmt
);
5438 if (fmt_str
== NULL
)
5441 if (!init_target_chars ())
5444 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5445 if (strcmp (fmt_str
, target_percent_s
) == 0)
5448 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp
, 2))))
5450 arg
= CALL_EXPR_ARG (exp
, 2);
5452 fn
= build_call_expr (fn_fputs
, 2, arg
, fp
);
5454 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5455 else if (strcmp (fmt_str
, target_percent_c
) == 0)
5458 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 2))) != INTEGER_TYPE
)
5460 arg
= CALL_EXPR_ARG (exp
, 2);
5462 fn
= build_call_expr (fn_fputc
, 2, arg
, fp
);
5466 /* We can't handle anything else with % args or %% ... yet. */
5467 if (strchr (fmt_str
, target_percent
))
5473 /* If the format specifier was "", fprintf does nothing. */
5474 if (fmt_str
[0] == '\0')
5476 /* Evaluate and ignore FILE* argument for side-effects. */
5477 expand_expr (fp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5481 /* When "string" doesn't contain %, replace all cases of
5482 fprintf(stream,string) with fputs(string,stream). The fputs
5483 builtin will take care of special cases like length == 1. */
5485 fn
= build_call_expr (fn_fputs
, 2, fmt
, fp
);
5490 if (TREE_CODE (fn
) == CALL_EXPR
)
5491 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5492 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
5495 /* Expand a call EXP to sprintf. Return NULL_RTX if
5496 a normal call should be emitted rather than expanding the function
5497 inline. If convenient, the result should be placed in TARGET with
5501 expand_builtin_sprintf (tree exp
, rtx target
, enum machine_mode mode
)
5504 const char *fmt_str
;
5505 int nargs
= call_expr_nargs (exp
);
5507 /* Verify the required arguments in the original call. */
5510 dest
= CALL_EXPR_ARG (exp
, 0);
5511 if (! POINTER_TYPE_P (TREE_TYPE (dest
)))
5513 fmt
= CALL_EXPR_ARG (exp
, 0);
5514 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5517 /* Check whether the format is a literal string constant. */
5518 fmt_str
= c_getstr (fmt
);
5519 if (fmt_str
== NULL
)
5522 if (!init_target_chars ())
5525 /* If the format doesn't contain % args or %%, use strcpy. */
5526 if (strchr (fmt_str
, target_percent
) == 0)
5528 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
5531 if ((nargs
> 2) || ! fn
)
5533 expand_expr (build_call_expr (fn
, 2, dest
, fmt
),
5534 const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5535 if (target
== const0_rtx
)
5537 exp
= build_int_cst (NULL_TREE
, strlen (fmt_str
));
5538 return expand_expr (exp
, target
, mode
, EXPAND_NORMAL
);
5540 /* If the format is "%s", use strcpy if the result isn't used. */
5541 else if (strcmp (fmt_str
, target_percent_s
) == 0)
5544 fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
5550 arg
= CALL_EXPR_ARG (exp
, 2);
5551 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
5554 if (target
!= const0_rtx
)
5556 len
= c_strlen (arg
, 1);
5557 if (! len
|| TREE_CODE (len
) != INTEGER_CST
)
5563 expand_expr (build_call_expr (fn
, 2, dest
, arg
),
5564 const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5566 if (target
== const0_rtx
)
5568 return expand_expr (len
, target
, mode
, EXPAND_NORMAL
);
5574 /* Expand a call to either the entry or exit function profiler. */
5577 expand_builtin_profile_func (bool exitp
)
5581 this = DECL_RTL (current_function_decl
);
5582 gcc_assert (MEM_P (this));
5583 this = XEXP (this, 0);
5586 which
= profile_function_exit_libfunc
;
5588 which
= profile_function_entry_libfunc
;
5590 emit_library_call (which
, LCT_NORMAL
, VOIDmode
, 2, this, Pmode
,
5591 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS
,
5598 /* Expand a call to __builtin___clear_cache. */
5601 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED
)
5603 #ifndef HAVE_clear_cache
5604 #ifdef CLEAR_INSN_CACHE
5605 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5606 does something. Just do the default expansion to a call to
5610 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5611 does nothing. There is no need to call it. Do nothing. */
5613 #endif /* CLEAR_INSN_CACHE */
5615 /* We have a "clear_cache" insn, and it will handle everything. */
5617 rtx begin_rtx
, end_rtx
;
5618 enum insn_code icode
;
5620 /* We must not expand to a library call. If we did, any
5621 fallback library function in libgcc that might contain a call to
5622 __builtin___clear_cache() would recurse infinitely. */
5623 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5625 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5629 if (HAVE_clear_cache
)
5631 icode
= CODE_FOR_clear_cache
;
5633 begin
= CALL_EXPR_ARG (exp
, 0);
5634 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5635 begin_rtx
= convert_memory_address (Pmode
, begin_rtx
);
5636 if (!insn_data
[icode
].operand
[0].predicate (begin_rtx
, Pmode
))
5637 begin_rtx
= copy_to_mode_reg (Pmode
, begin_rtx
);
5639 end
= CALL_EXPR_ARG (exp
, 1);
5640 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5641 end_rtx
= convert_memory_address (Pmode
, end_rtx
);
5642 if (!insn_data
[icode
].operand
[1].predicate (end_rtx
, Pmode
))
5643 end_rtx
= copy_to_mode_reg (Pmode
, end_rtx
);
5645 emit_insn (gen_clear_cache (begin_rtx
, end_rtx
));
5648 #endif /* HAVE_clear_cache */
5651 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5654 round_trampoline_addr (rtx tramp
)
5656 rtx temp
, addend
, mask
;
5658 /* If we don't need too much alignment, we'll have been guaranteed
5659 proper alignment by get_trampoline_type. */
5660 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
5663 /* Round address up to desired boundary. */
5664 temp
= gen_reg_rtx (Pmode
);
5665 addend
= GEN_INT (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1);
5666 mask
= GEN_INT (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
);
5668 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
5669 temp
, 0, OPTAB_LIB_WIDEN
);
5670 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
5671 temp
, 0, OPTAB_LIB_WIDEN
);
5677 expand_builtin_init_trampoline (tree exp
)
5679 tree t_tramp
, t_func
, t_chain
;
5680 rtx r_tramp
, r_func
, r_chain
;
5681 #ifdef TRAMPOLINE_TEMPLATE
5685 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
5686 POINTER_TYPE
, VOID_TYPE
))
5689 t_tramp
= CALL_EXPR_ARG (exp
, 0);
5690 t_func
= CALL_EXPR_ARG (exp
, 1);
5691 t_chain
= CALL_EXPR_ARG (exp
, 2);
5693 r_tramp
= expand_normal (t_tramp
);
5694 r_func
= expand_normal (t_func
);
5695 r_chain
= expand_normal (t_chain
);
5697 /* Generate insns to initialize the trampoline. */
5698 r_tramp
= round_trampoline_addr (r_tramp
);
5699 #ifdef TRAMPOLINE_TEMPLATE
5700 blktramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
5701 set_mem_align (blktramp
, TRAMPOLINE_ALIGNMENT
);
5702 emit_block_move (blktramp
, assemble_trampoline_template (),
5703 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
5705 trampolines_created
= 1;
5706 INITIALIZE_TRAMPOLINE (r_tramp
, r_func
, r_chain
);
5712 expand_builtin_adjust_trampoline (tree exp
)
5716 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5719 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5720 tramp
= round_trampoline_addr (tramp
);
5721 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5722 TRAMPOLINE_ADJUST_ADDRESS (tramp
);
5728 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5729 function. The function first checks whether the back end provides
5730 an insn to implement signbit for the respective mode. If not, it
5731 checks whether the floating point format of the value is such that
5732 the sign bit can be extracted. If that is not the case, the
5733 function returns NULL_RTX to indicate that a normal call should be
5734 emitted rather than expanding the function in-line. EXP is the
5735 expression that is a call to the builtin function; if convenient,
5736 the result should be placed in TARGET. */
5738 expand_builtin_signbit (tree exp
, rtx target
)
5740 const struct real_format
*fmt
;
5741 enum machine_mode fmode
, imode
, rmode
;
5742 HOST_WIDE_INT hi
, lo
;
5745 enum insn_code icode
;
5748 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5751 arg
= CALL_EXPR_ARG (exp
, 0);
5752 fmode
= TYPE_MODE (TREE_TYPE (arg
));
5753 rmode
= TYPE_MODE (TREE_TYPE (exp
));
5754 fmt
= REAL_MODE_FORMAT (fmode
);
5756 arg
= builtin_save_expr (arg
);
5758 /* Expand the argument yielding a RTX expression. */
5759 temp
= expand_normal (arg
);
5761 /* Check if the back end provides an insn that handles signbit for the
5763 icode
= signbit_optab
->handlers
[(int) fmode
].insn_code
;
5764 if (icode
!= CODE_FOR_nothing
)
5766 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5767 emit_unop_insn (icode
, target
, temp
, UNKNOWN
);
5771 /* For floating point formats without a sign bit, implement signbit
5773 bitpos
= fmt
->signbit_ro
;
5776 /* But we can't do this if the format supports signed zero. */
5777 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
5780 arg
= fold_build2 (LT_EXPR
, TREE_TYPE (exp
), arg
,
5781 build_real (TREE_TYPE (arg
), dconst0
));
5782 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5785 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5787 imode
= int_mode_for_mode (fmode
);
5788 if (imode
== BLKmode
)
5790 temp
= gen_lowpart (imode
, temp
);
5795 /* Handle targets with different FP word orders. */
5796 if (FLOAT_WORDS_BIG_ENDIAN
)
5797 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5799 word
= bitpos
/ BITS_PER_WORD
;
5800 temp
= operand_subword_force (temp
, word
, fmode
);
5801 bitpos
= bitpos
% BITS_PER_WORD
;
5804 /* Force the intermediate word_mode (or narrower) result into a
5805 register. This avoids attempting to create paradoxical SUBREGs
5806 of floating point modes below. */
5807 temp
= force_reg (imode
, temp
);
5809 /* If the bitpos is within the "result mode" lowpart, the operation
5810 can be implement with a single bitwise AND. Otherwise, we need
5811 a right shift and an AND. */
5813 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5815 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
5818 lo
= (HOST_WIDE_INT
) 1 << bitpos
;
5822 hi
= (HOST_WIDE_INT
) 1 << (bitpos
- HOST_BITS_PER_WIDE_INT
);
5826 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
5827 temp
= gen_lowpart (rmode
, temp
);
5828 temp
= expand_binop (rmode
, and_optab
, temp
,
5829 immed_double_const (lo
, hi
, rmode
),
5830 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5834 /* Perform a logical right shift to place the signbit in the least
5835 significant bit, then truncate the result to the desired mode
5836 and mask just this bit. */
5837 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
,
5838 build_int_cst (NULL_TREE
, bitpos
), NULL_RTX
, 1);
5839 temp
= gen_lowpart (rmode
, temp
);
5840 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5841 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5847 /* Expand fork or exec calls. TARGET is the desired target of the
5848 call. EXP is the call. FN is the
5849 identificator of the actual function. IGNORE is nonzero if the
5850 value is to be ignored. */
5853 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5858 /* If we are not profiling, just call the function. */
5859 if (!profile_arc_flag
)
5862 /* Otherwise call the wrapper. This should be equivalent for the rest of
5863 compiler, so the code does not diverge, and the wrapper may run the
5864 code necessary for keeping the profiling sane. */
5866 switch (DECL_FUNCTION_CODE (fn
))
5869 id
= get_identifier ("__gcov_fork");
5872 case BUILT_IN_EXECL
:
5873 id
= get_identifier ("__gcov_execl");
5876 case BUILT_IN_EXECV
:
5877 id
= get_identifier ("__gcov_execv");
5880 case BUILT_IN_EXECLP
:
5881 id
= get_identifier ("__gcov_execlp");
5884 case BUILT_IN_EXECLE
:
5885 id
= get_identifier ("__gcov_execle");
5888 case BUILT_IN_EXECVP
:
5889 id
= get_identifier ("__gcov_execvp");
5892 case BUILT_IN_EXECVE
:
5893 id
= get_identifier ("__gcov_execve");
5900 decl
= build_decl (FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5901 DECL_EXTERNAL (decl
) = 1;
5902 TREE_PUBLIC (decl
) = 1;
5903 DECL_ARTIFICIAL (decl
) = 1;
5904 TREE_NOTHROW (decl
) = 1;
5905 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5906 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5907 call
= rewrite_call_expr (exp
, 0, decl
, 0);
5908 return expand_call (call
, target
, ignore
);
5913 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5914 the pointer in these functions is void*, the tree optimizers may remove
5915 casts. The mode computed in expand_builtin isn't reliable either, due
5916 to __sync_bool_compare_and_swap.
5918 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5919 group of builtins. This gives us log2 of the mode size. */
5921 static inline enum machine_mode
5922 get_builtin_sync_mode (int fcode_diff
)
5924 /* The size is not negotiable, so ask not to get BLKmode in return
5925 if the target indicates that a smaller size would be better. */
5926 return mode_for_size (BITS_PER_UNIT
<< fcode_diff
, MODE_INT
, 0);
5929 /* Expand the memory expression LOC and return the appropriate memory operand
5930 for the builtin_sync operations. */
5933 get_builtin_sync_mem (tree loc
, enum machine_mode mode
)
5937 addr
= expand_expr (loc
, NULL_RTX
, Pmode
, EXPAND_SUM
);
5939 /* Note that we explicitly do not want any alias information for this
5940 memory, so that we kill all other live memories. Otherwise we don't
5941 satisfy the full barrier semantics of the intrinsic. */
5942 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
5944 set_mem_align (mem
, get_pointer_alignment (loc
, BIGGEST_ALIGNMENT
));
5945 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
5946 MEM_VOLATILE_P (mem
) = 1;
5951 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5952 EXP is the CALL_EXPR. CODE is the rtx code
5953 that corresponds to the arithmetic or logical operation from the name;
5954 an exception here is that NOT actually means NAND. TARGET is an optional
5955 place for us to store the results; AFTER is true if this is the
5956 fetch_and_xxx form. IGNORE is true if we don't actually care about
5957 the result of the operation at all. */
5960 expand_builtin_sync_operation (enum machine_mode mode
, tree exp
,
5961 enum rtx_code code
, bool after
,
5962 rtx target
, bool ignore
)
5965 enum machine_mode old_mode
;
5967 /* Expand the operands. */
5968 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5970 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
, mode
, EXPAND_NORMAL
);
5971 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5972 of CONST_INTs, where we know the old_mode only from the call argument. */
5973 old_mode
= GET_MODE (val
);
5974 if (old_mode
== VOIDmode
)
5975 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
5976 val
= convert_modes (mode
, old_mode
, val
, 1);
5979 return expand_sync_operation (mem
, val
, code
);
5981 return expand_sync_fetch_operation (mem
, val
, code
, after
, target
);
5984 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5985 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5986 true if this is the boolean form. TARGET is a place for us to store the
5987 results; this is NOT optional if IS_BOOL is true. */
5990 expand_builtin_compare_and_swap (enum machine_mode mode
, tree exp
,
5991 bool is_bool
, rtx target
)
5993 rtx old_val
, new_val
, mem
;
5994 enum machine_mode old_mode
;
5996 /* Expand the operands. */
5997 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6000 old_val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
,
6001 mode
, EXPAND_NORMAL
);
6002 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6003 of CONST_INTs, where we know the old_mode only from the call argument. */
6004 old_mode
= GET_MODE (old_val
);
6005 if (old_mode
== VOIDmode
)
6006 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
6007 old_val
= convert_modes (mode
, old_mode
, old_val
, 1);
6009 new_val
= expand_expr (CALL_EXPR_ARG (exp
, 2), NULL_RTX
,
6010 mode
, EXPAND_NORMAL
);
6011 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6012 of CONST_INTs, where we know the old_mode only from the call argument. */
6013 old_mode
= GET_MODE (new_val
);
6014 if (old_mode
== VOIDmode
)
6015 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 2)));
6016 new_val
= convert_modes (mode
, old_mode
, new_val
, 1);
6019 return expand_bool_compare_and_swap (mem
, old_val
, new_val
, target
);
6021 return expand_val_compare_and_swap (mem
, old_val
, new_val
, target
);
6024 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6025 general form is actually an atomic exchange, and some targets only
6026 support a reduced form with the second argument being a constant 1.
6027 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6031 expand_builtin_lock_test_and_set (enum machine_mode mode
, tree exp
,
6035 enum machine_mode old_mode
;
6037 /* Expand the operands. */
6038 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6039 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
, mode
, EXPAND_NORMAL
);
6040 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6041 of CONST_INTs, where we know the old_mode only from the call argument. */
6042 old_mode
= GET_MODE (val
);
6043 if (old_mode
== VOIDmode
)
6044 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
6045 val
= convert_modes (mode
, old_mode
, val
, 1);
6047 return expand_sync_lock_test_and_set (mem
, val
, target
);
6050 /* Expand the __sync_synchronize intrinsic. */
6053 expand_builtin_synchronize (void)
6057 #ifdef HAVE_memory_barrier
6058 if (HAVE_memory_barrier
)
6060 emit_insn (gen_memory_barrier ());
6065 if (synchronize_libfunc
!= NULL_RTX
)
6067 emit_library_call (synchronize_libfunc
, LCT_NORMAL
, VOIDmode
, 0);
6071 /* If no explicit memory barrier instruction is available, create an
6072 empty asm stmt with a memory clobber. */
6073 x
= build4 (ASM_EXPR
, void_type_node
, build_string (0, ""), NULL
, NULL
,
6074 tree_cons (NULL
, build_string (6, "memory"), NULL
));
6075 ASM_VOLATILE_P (x
) = 1;
6076 expand_asm_expr (x
);
6079 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6082 expand_builtin_lock_release (enum machine_mode mode
, tree exp
)
6084 enum insn_code icode
;
6086 rtx val
= const0_rtx
;
6088 /* Expand the operands. */
6089 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6091 /* If there is an explicit operation in the md file, use it. */
6092 icode
= sync_lock_release
[mode
];
6093 if (icode
!= CODE_FOR_nothing
)
6095 if (!insn_data
[icode
].operand
[1].predicate (val
, mode
))
6096 val
= force_reg (mode
, val
);
6098 insn
= GEN_FCN (icode
) (mem
, val
);
6106 /* Otherwise we can implement this operation by emitting a barrier
6107 followed by a store of zero. */
6108 expand_builtin_synchronize ();
6109 emit_move_insn (mem
, val
);
6112 /* Expand an expression EXP that calls a built-in function,
6113 with result going to TARGET if that's convenient
6114 (and in mode MODE if that's convenient).
6115 SUBTARGET may be used as the target for computing one of EXP's operands.
6116 IGNORE is nonzero if the value is to be ignored. */
6119 expand_builtin (tree exp
, rtx target
, rtx subtarget
, enum machine_mode mode
,
6122 tree fndecl
= get_callee_fndecl (exp
);
6123 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
6124 enum machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
6126 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6127 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
6129 /* When not optimizing, generate calls to library functions for a certain
6132 && !called_as_built_in (fndecl
)
6133 && DECL_ASSEMBLER_NAME_SET_P (fndecl
)
6134 && fcode
!= BUILT_IN_ALLOCA
6135 && fcode
!= BUILT_IN_FREE
)
6136 return expand_call (exp
, target
, ignore
);
6138 /* The built-in function expanders test for target == const0_rtx
6139 to determine whether the function's result will be ignored. */
6141 target
= const0_rtx
;
6143 /* If the result of a pure or const built-in function is ignored, and
6144 none of its arguments are volatile, we can avoid expanding the
6145 built-in call and just evaluate the arguments for side-effects. */
6146 if (target
== const0_rtx
6147 && (DECL_PURE_P (fndecl
) || TREE_READONLY (fndecl
)))
6149 bool volatilep
= false;
6151 call_expr_arg_iterator iter
;
6153 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
6154 if (TREE_THIS_VOLATILE (arg
))
6162 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
6163 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6170 CASE_FLT_FN (BUILT_IN_FABS
):
6171 target
= expand_builtin_fabs (exp
, target
, subtarget
);
6176 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
6177 target
= expand_builtin_copysign (exp
, target
, subtarget
);
6182 /* Just do a normal library call if we were unable to fold
6184 CASE_FLT_FN (BUILT_IN_CABS
):
6187 CASE_FLT_FN (BUILT_IN_EXP
):
6188 CASE_FLT_FN (BUILT_IN_EXP10
):
6189 CASE_FLT_FN (BUILT_IN_POW10
):
6190 CASE_FLT_FN (BUILT_IN_EXP2
):
6191 CASE_FLT_FN (BUILT_IN_EXPM1
):
6192 CASE_FLT_FN (BUILT_IN_LOGB
):
6193 CASE_FLT_FN (BUILT_IN_LOG
):
6194 CASE_FLT_FN (BUILT_IN_LOG10
):
6195 CASE_FLT_FN (BUILT_IN_LOG2
):
6196 CASE_FLT_FN (BUILT_IN_LOG1P
):
6197 CASE_FLT_FN (BUILT_IN_TAN
):
6198 CASE_FLT_FN (BUILT_IN_ASIN
):
6199 CASE_FLT_FN (BUILT_IN_ACOS
):
6200 CASE_FLT_FN (BUILT_IN_ATAN
):
6201 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6202 because of possible accuracy problems. */
6203 if (! flag_unsafe_math_optimizations
)
6205 CASE_FLT_FN (BUILT_IN_SQRT
):
6206 CASE_FLT_FN (BUILT_IN_FLOOR
):
6207 CASE_FLT_FN (BUILT_IN_CEIL
):
6208 CASE_FLT_FN (BUILT_IN_TRUNC
):
6209 CASE_FLT_FN (BUILT_IN_ROUND
):
6210 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
6211 CASE_FLT_FN (BUILT_IN_RINT
):
6212 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
6217 CASE_FLT_FN (BUILT_IN_ILOGB
):
6218 if (! flag_unsafe_math_optimizations
)
6220 CASE_FLT_FN (BUILT_IN_ISINF
):
6221 CASE_FLT_FN (BUILT_IN_FINITE
):
6222 case BUILT_IN_ISFINITE
:
6223 case BUILT_IN_ISNORMAL
:
6224 target
= expand_builtin_interclass_mathfn (exp
, target
, subtarget
);
6229 CASE_FLT_FN (BUILT_IN_LCEIL
):
6230 CASE_FLT_FN (BUILT_IN_LLCEIL
):
6231 CASE_FLT_FN (BUILT_IN_LFLOOR
):
6232 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
6233 target
= expand_builtin_int_roundingfn (exp
, target
);
6238 CASE_FLT_FN (BUILT_IN_LRINT
):
6239 CASE_FLT_FN (BUILT_IN_LLRINT
):
6240 CASE_FLT_FN (BUILT_IN_LROUND
):
6241 CASE_FLT_FN (BUILT_IN_LLROUND
):
6242 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
6247 CASE_FLT_FN (BUILT_IN_POW
):
6248 target
= expand_builtin_pow (exp
, target
, subtarget
);
6253 CASE_FLT_FN (BUILT_IN_POWI
):
6254 target
= expand_builtin_powi (exp
, target
, subtarget
);
6259 CASE_FLT_FN (BUILT_IN_ATAN2
):
6260 CASE_FLT_FN (BUILT_IN_LDEXP
):
6261 CASE_FLT_FN (BUILT_IN_SCALB
):
6262 CASE_FLT_FN (BUILT_IN_SCALBN
):
6263 CASE_FLT_FN (BUILT_IN_SCALBLN
):
6264 if (! flag_unsafe_math_optimizations
)
6267 CASE_FLT_FN (BUILT_IN_FMOD
):
6268 CASE_FLT_FN (BUILT_IN_REMAINDER
):
6269 CASE_FLT_FN (BUILT_IN_DREM
):
6270 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
6275 CASE_FLT_FN (BUILT_IN_CEXPI
):
6276 target
= expand_builtin_cexpi (exp
, target
, subtarget
);
6277 gcc_assert (target
);
6280 CASE_FLT_FN (BUILT_IN_SIN
):
6281 CASE_FLT_FN (BUILT_IN_COS
):
6282 if (! flag_unsafe_math_optimizations
)
6284 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6289 CASE_FLT_FN (BUILT_IN_SINCOS
):
6290 if (! flag_unsafe_math_optimizations
)
6292 target
= expand_builtin_sincos (exp
);
6297 case BUILT_IN_APPLY_ARGS
:
6298 return expand_builtin_apply_args ();
6300 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6301 FUNCTION with a copy of the parameters described by
6302 ARGUMENTS, and ARGSIZE. It returns a block of memory
6303 allocated on the stack into which is stored all the registers
6304 that might possibly be used for returning the result of a
6305 function. ARGUMENTS is the value returned by
6306 __builtin_apply_args. ARGSIZE is the number of bytes of
6307 arguments that must be copied. ??? How should this value be
6308 computed? We'll also need a safe worst case value for varargs
6310 case BUILT_IN_APPLY
:
6311 if (!validate_arglist (exp
, POINTER_TYPE
,
6312 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6313 && !validate_arglist (exp
, REFERENCE_TYPE
,
6314 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6320 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6321 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6322 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6324 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6327 /* __builtin_return (RESULT) causes the function to return the
6328 value described by RESULT. RESULT is address of the block of
6329 memory returned by __builtin_apply. */
6330 case BUILT_IN_RETURN
:
6331 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6332 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6335 case BUILT_IN_SAVEREGS
:
6336 return expand_builtin_saveregs ();
6338 case BUILT_IN_ARGS_INFO
:
6339 return expand_builtin_args_info (exp
);
6341 case BUILT_IN_VA_ARG_PACK
:
6342 /* All valid uses of __builtin_va_arg_pack () are removed during
6344 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6347 case BUILT_IN_VA_ARG_PACK_LEN
:
6348 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6350 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6353 /* Return the address of the first anonymous stack arg. */
6354 case BUILT_IN_NEXT_ARG
:
6355 if (fold_builtin_next_arg (exp
, false))
6357 return expand_builtin_next_arg ();
6359 case BUILT_IN_CLEAR_CACHE
:
6360 target
= expand_builtin___clear_cache (exp
);
6365 case BUILT_IN_CLASSIFY_TYPE
:
6366 return expand_builtin_classify_type (exp
);
6368 case BUILT_IN_CONSTANT_P
:
6371 case BUILT_IN_FRAME_ADDRESS
:
6372 case BUILT_IN_RETURN_ADDRESS
:
6373 return expand_builtin_frame_address (fndecl
, exp
);
6375 /* Returns the address of the area where the structure is returned.
6377 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6378 if (call_expr_nargs (exp
) != 0
6379 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6380 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6383 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6385 case BUILT_IN_ALLOCA
:
6386 target
= expand_builtin_alloca (exp
, target
);
6391 case BUILT_IN_STACK_SAVE
:
6392 return expand_stack_save ();
6394 case BUILT_IN_STACK_RESTORE
:
6395 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6398 case BUILT_IN_BSWAP32
:
6399 case BUILT_IN_BSWAP64
:
6400 target
= expand_builtin_bswap (exp
, target
, subtarget
);
6406 CASE_INT_FN (BUILT_IN_FFS
):
6407 case BUILT_IN_FFSIMAX
:
6408 target
= expand_builtin_unop (target_mode
, exp
, target
,
6409 subtarget
, ffs_optab
);
6414 CASE_INT_FN (BUILT_IN_CLZ
):
6415 case BUILT_IN_CLZIMAX
:
6416 target
= expand_builtin_unop (target_mode
, exp
, target
,
6417 subtarget
, clz_optab
);
6422 CASE_INT_FN (BUILT_IN_CTZ
):
6423 case BUILT_IN_CTZIMAX
:
6424 target
= expand_builtin_unop (target_mode
, exp
, target
,
6425 subtarget
, ctz_optab
);
6430 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6431 case BUILT_IN_POPCOUNTIMAX
:
6432 target
= expand_builtin_unop (target_mode
, exp
, target
,
6433 subtarget
, popcount_optab
);
6438 CASE_INT_FN (BUILT_IN_PARITY
):
6439 case BUILT_IN_PARITYIMAX
:
6440 target
= expand_builtin_unop (target_mode
, exp
, target
,
6441 subtarget
, parity_optab
);
6446 case BUILT_IN_STRLEN
:
6447 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6452 case BUILT_IN_STRCPY
:
6453 target
= expand_builtin_strcpy (fndecl
, exp
, target
, mode
);
6458 case BUILT_IN_STRNCPY
:
6459 target
= expand_builtin_strncpy (exp
, target
, mode
);
6464 case BUILT_IN_STPCPY
:
6465 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6470 case BUILT_IN_STRCAT
:
6471 target
= expand_builtin_strcat (fndecl
, exp
, target
, mode
);
6476 case BUILT_IN_STRNCAT
:
6477 target
= expand_builtin_strncat (exp
, target
, mode
);
6482 case BUILT_IN_STRSPN
:
6483 target
= expand_builtin_strspn (exp
, target
, mode
);
6488 case BUILT_IN_STRCSPN
:
6489 target
= expand_builtin_strcspn (exp
, target
, mode
);
6494 case BUILT_IN_STRSTR
:
6495 target
= expand_builtin_strstr (exp
, target
, mode
);
6500 case BUILT_IN_STRPBRK
:
6501 target
= expand_builtin_strpbrk (exp
, target
, mode
);
6506 case BUILT_IN_INDEX
:
6507 case BUILT_IN_STRCHR
:
6508 target
= expand_builtin_strchr (exp
, target
, mode
);
6513 case BUILT_IN_RINDEX
:
6514 case BUILT_IN_STRRCHR
:
6515 target
= expand_builtin_strrchr (exp
, target
, mode
);
6520 case BUILT_IN_MEMCPY
:
6521 target
= expand_builtin_memcpy (exp
, target
, mode
);
6526 case BUILT_IN_MEMPCPY
:
6527 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6532 case BUILT_IN_MEMMOVE
:
6533 target
= expand_builtin_memmove (exp
, target
, mode
, ignore
);
6538 case BUILT_IN_BCOPY
:
6539 target
= expand_builtin_bcopy (exp
, ignore
);
6544 case BUILT_IN_MEMSET
:
6545 target
= expand_builtin_memset (exp
, target
, mode
);
6550 case BUILT_IN_BZERO
:
6551 target
= expand_builtin_bzero (exp
);
6556 case BUILT_IN_STRCMP
:
6557 target
= expand_builtin_strcmp (exp
, target
, mode
);
6562 case BUILT_IN_STRNCMP
:
6563 target
= expand_builtin_strncmp (exp
, target
, mode
);
6568 case BUILT_IN_MEMCHR
:
6569 target
= expand_builtin_memchr (exp
, target
, mode
);
6575 case BUILT_IN_MEMCMP
:
6576 target
= expand_builtin_memcmp (exp
, target
, mode
);
6581 case BUILT_IN_SETJMP
:
6582 /* This should have been lowered to the builtins below. */
6585 case BUILT_IN_SETJMP_SETUP
:
6586 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6587 and the receiver label. */
6588 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6590 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6591 VOIDmode
, EXPAND_NORMAL
);
6592 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6593 rtx label_r
= label_rtx (label
);
6595 /* This is copied from the handling of non-local gotos. */
6596 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6597 nonlocal_goto_handler_labels
6598 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
6599 nonlocal_goto_handler_labels
);
6600 /* ??? Do not let expand_label treat us as such since we would
6601 not want to be both on the list of non-local labels and on
6602 the list of forced labels. */
6603 FORCED_LABEL (label
) = 0;
6608 case BUILT_IN_SETJMP_DISPATCHER
:
6609 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6610 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6612 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6613 rtx label_r
= label_rtx (label
);
6615 /* Remove the dispatcher label from the list of non-local labels
6616 since the receiver labels have been added to it above. */
6617 remove_node_from_expr_list (label_r
, &nonlocal_goto_handler_labels
);
6622 case BUILT_IN_SETJMP_RECEIVER
:
6623 /* __builtin_setjmp_receiver is passed the receiver label. */
6624 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6626 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6627 rtx label_r
= label_rtx (label
);
6629 expand_builtin_setjmp_receiver (label_r
);
6634 /* __builtin_longjmp is passed a pointer to an array of five words.
6635 It's similar to the C library longjmp function but works with
6636 __builtin_setjmp above. */
6637 case BUILT_IN_LONGJMP
:
6638 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6640 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6641 VOIDmode
, EXPAND_NORMAL
);
6642 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6644 if (value
!= const1_rtx
)
6646 error ("%<__builtin_longjmp%> second argument must be 1");
6650 expand_builtin_longjmp (buf_addr
, value
);
6655 case BUILT_IN_NONLOCAL_GOTO
:
6656 target
= expand_builtin_nonlocal_goto (exp
);
6661 /* This updates the setjmp buffer that is its argument with the value
6662 of the current stack pointer. */
6663 case BUILT_IN_UPDATE_SETJMP_BUF
:
6664 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6667 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6669 expand_builtin_update_setjmp_buf (buf_addr
);
6675 expand_builtin_trap ();
6678 case BUILT_IN_PRINTF
:
6679 target
= expand_builtin_printf (exp
, target
, mode
, false);
6684 case BUILT_IN_PRINTF_UNLOCKED
:
6685 target
= expand_builtin_printf (exp
, target
, mode
, true);
6690 case BUILT_IN_FPUTS
:
6691 target
= expand_builtin_fputs (exp
, target
, false);
6695 case BUILT_IN_FPUTS_UNLOCKED
:
6696 target
= expand_builtin_fputs (exp
, target
, true);
6701 case BUILT_IN_FPRINTF
:
6702 target
= expand_builtin_fprintf (exp
, target
, mode
, false);
6707 case BUILT_IN_FPRINTF_UNLOCKED
:
6708 target
= expand_builtin_fprintf (exp
, target
, mode
, true);
6713 case BUILT_IN_SPRINTF
:
6714 target
= expand_builtin_sprintf (exp
, target
, mode
);
6719 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6720 case BUILT_IN_SIGNBITD32
:
6721 case BUILT_IN_SIGNBITD64
:
6722 case BUILT_IN_SIGNBITD128
:
6723 target
= expand_builtin_signbit (exp
, target
);
6728 /* Various hooks for the DWARF 2 __throw routine. */
6729 case BUILT_IN_UNWIND_INIT
:
6730 expand_builtin_unwind_init ();
6732 case BUILT_IN_DWARF_CFA
:
6733 return virtual_cfa_rtx
;
6734 #ifdef DWARF2_UNWIND_INFO
6735 case BUILT_IN_DWARF_SP_COLUMN
:
6736 return expand_builtin_dwarf_sp_column ();
6737 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6738 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6741 case BUILT_IN_FROB_RETURN_ADDR
:
6742 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6743 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6744 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6745 case BUILT_IN_EH_RETURN
:
6746 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6747 CALL_EXPR_ARG (exp
, 1));
6749 #ifdef EH_RETURN_DATA_REGNO
6750 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6751 return expand_builtin_eh_return_data_regno (exp
);
6753 case BUILT_IN_EXTEND_POINTER
:
6754 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6756 case BUILT_IN_VA_START
:
6757 return expand_builtin_va_start (exp
);
6758 case BUILT_IN_VA_END
:
6759 return expand_builtin_va_end (exp
);
6760 case BUILT_IN_VA_COPY
:
6761 return expand_builtin_va_copy (exp
);
6762 case BUILT_IN_EXPECT
:
6763 return expand_builtin_expect (exp
, target
);
6764 case BUILT_IN_PREFETCH
:
6765 expand_builtin_prefetch (exp
);
6768 case BUILT_IN_PROFILE_FUNC_ENTER
:
6769 return expand_builtin_profile_func (false);
6770 case BUILT_IN_PROFILE_FUNC_EXIT
:
6771 return expand_builtin_profile_func (true);
6773 case BUILT_IN_INIT_TRAMPOLINE
:
6774 return expand_builtin_init_trampoline (exp
);
6775 case BUILT_IN_ADJUST_TRAMPOLINE
:
6776 return expand_builtin_adjust_trampoline (exp
);
6779 case BUILT_IN_EXECL
:
6780 case BUILT_IN_EXECV
:
6781 case BUILT_IN_EXECLP
:
6782 case BUILT_IN_EXECLE
:
6783 case BUILT_IN_EXECVP
:
6784 case BUILT_IN_EXECVE
:
6785 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6790 case BUILT_IN_FETCH_AND_ADD_1
:
6791 case BUILT_IN_FETCH_AND_ADD_2
:
6792 case BUILT_IN_FETCH_AND_ADD_4
:
6793 case BUILT_IN_FETCH_AND_ADD_8
:
6794 case BUILT_IN_FETCH_AND_ADD_16
:
6795 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_ADD_1
);
6796 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6797 false, target
, ignore
);
6802 case BUILT_IN_FETCH_AND_SUB_1
:
6803 case BUILT_IN_FETCH_AND_SUB_2
:
6804 case BUILT_IN_FETCH_AND_SUB_4
:
6805 case BUILT_IN_FETCH_AND_SUB_8
:
6806 case BUILT_IN_FETCH_AND_SUB_16
:
6807 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_SUB_1
);
6808 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6809 false, target
, ignore
);
6814 case BUILT_IN_FETCH_AND_OR_1
:
6815 case BUILT_IN_FETCH_AND_OR_2
:
6816 case BUILT_IN_FETCH_AND_OR_4
:
6817 case BUILT_IN_FETCH_AND_OR_8
:
6818 case BUILT_IN_FETCH_AND_OR_16
:
6819 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_OR_1
);
6820 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6821 false, target
, ignore
);
6826 case BUILT_IN_FETCH_AND_AND_1
:
6827 case BUILT_IN_FETCH_AND_AND_2
:
6828 case BUILT_IN_FETCH_AND_AND_4
:
6829 case BUILT_IN_FETCH_AND_AND_8
:
6830 case BUILT_IN_FETCH_AND_AND_16
:
6831 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_AND_1
);
6832 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6833 false, target
, ignore
);
6838 case BUILT_IN_FETCH_AND_XOR_1
:
6839 case BUILT_IN_FETCH_AND_XOR_2
:
6840 case BUILT_IN_FETCH_AND_XOR_4
:
6841 case BUILT_IN_FETCH_AND_XOR_8
:
6842 case BUILT_IN_FETCH_AND_XOR_16
:
6843 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_XOR_1
);
6844 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6845 false, target
, ignore
);
6850 case BUILT_IN_FETCH_AND_NAND_1
:
6851 case BUILT_IN_FETCH_AND_NAND_2
:
6852 case BUILT_IN_FETCH_AND_NAND_4
:
6853 case BUILT_IN_FETCH_AND_NAND_8
:
6854 case BUILT_IN_FETCH_AND_NAND_16
:
6855 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_NAND_1
);
6856 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6857 false, target
, ignore
);
6862 case BUILT_IN_ADD_AND_FETCH_1
:
6863 case BUILT_IN_ADD_AND_FETCH_2
:
6864 case BUILT_IN_ADD_AND_FETCH_4
:
6865 case BUILT_IN_ADD_AND_FETCH_8
:
6866 case BUILT_IN_ADD_AND_FETCH_16
:
6867 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ADD_AND_FETCH_1
);
6868 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6869 true, target
, ignore
);
6874 case BUILT_IN_SUB_AND_FETCH_1
:
6875 case BUILT_IN_SUB_AND_FETCH_2
:
6876 case BUILT_IN_SUB_AND_FETCH_4
:
6877 case BUILT_IN_SUB_AND_FETCH_8
:
6878 case BUILT_IN_SUB_AND_FETCH_16
:
6879 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SUB_AND_FETCH_1
);
6880 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6881 true, target
, ignore
);
6886 case BUILT_IN_OR_AND_FETCH_1
:
6887 case BUILT_IN_OR_AND_FETCH_2
:
6888 case BUILT_IN_OR_AND_FETCH_4
:
6889 case BUILT_IN_OR_AND_FETCH_8
:
6890 case BUILT_IN_OR_AND_FETCH_16
:
6891 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_OR_AND_FETCH_1
);
6892 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6893 true, target
, ignore
);
6898 case BUILT_IN_AND_AND_FETCH_1
:
6899 case BUILT_IN_AND_AND_FETCH_2
:
6900 case BUILT_IN_AND_AND_FETCH_4
:
6901 case BUILT_IN_AND_AND_FETCH_8
:
6902 case BUILT_IN_AND_AND_FETCH_16
:
6903 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_AND_AND_FETCH_1
);
6904 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6905 true, target
, ignore
);
6910 case BUILT_IN_XOR_AND_FETCH_1
:
6911 case BUILT_IN_XOR_AND_FETCH_2
:
6912 case BUILT_IN_XOR_AND_FETCH_4
:
6913 case BUILT_IN_XOR_AND_FETCH_8
:
6914 case BUILT_IN_XOR_AND_FETCH_16
:
6915 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_XOR_AND_FETCH_1
);
6916 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6917 true, target
, ignore
);
6922 case BUILT_IN_NAND_AND_FETCH_1
:
6923 case BUILT_IN_NAND_AND_FETCH_2
:
6924 case BUILT_IN_NAND_AND_FETCH_4
:
6925 case BUILT_IN_NAND_AND_FETCH_8
:
6926 case BUILT_IN_NAND_AND_FETCH_16
:
6927 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_NAND_AND_FETCH_1
);
6928 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6929 true, target
, ignore
);
6934 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1
:
6935 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2
:
6936 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4
:
6937 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8
:
6938 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16
:
6939 if (mode
== VOIDmode
)
6940 mode
= TYPE_MODE (boolean_type_node
);
6941 if (!target
|| !register_operand (target
, mode
))
6942 target
= gen_reg_rtx (mode
);
6944 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_BOOL_COMPARE_AND_SWAP_1
);
6945 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6950 case BUILT_IN_VAL_COMPARE_AND_SWAP_1
:
6951 case BUILT_IN_VAL_COMPARE_AND_SWAP_2
:
6952 case BUILT_IN_VAL_COMPARE_AND_SWAP_4
:
6953 case BUILT_IN_VAL_COMPARE_AND_SWAP_8
:
6954 case BUILT_IN_VAL_COMPARE_AND_SWAP_16
:
6955 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_VAL_COMPARE_AND_SWAP_1
);
6956 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6961 case BUILT_IN_LOCK_TEST_AND_SET_1
:
6962 case BUILT_IN_LOCK_TEST_AND_SET_2
:
6963 case BUILT_IN_LOCK_TEST_AND_SET_4
:
6964 case BUILT_IN_LOCK_TEST_AND_SET_8
:
6965 case BUILT_IN_LOCK_TEST_AND_SET_16
:
6966 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_TEST_AND_SET_1
);
6967 target
= expand_builtin_lock_test_and_set (mode
, exp
, target
);
6972 case BUILT_IN_LOCK_RELEASE_1
:
6973 case BUILT_IN_LOCK_RELEASE_2
:
6974 case BUILT_IN_LOCK_RELEASE_4
:
6975 case BUILT_IN_LOCK_RELEASE_8
:
6976 case BUILT_IN_LOCK_RELEASE_16
:
6977 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_RELEASE_1
);
6978 expand_builtin_lock_release (mode
, exp
);
6981 case BUILT_IN_SYNCHRONIZE
:
6982 expand_builtin_synchronize ();
6985 case BUILT_IN_OBJECT_SIZE
:
6986 return expand_builtin_object_size (exp
);
6988 case BUILT_IN_MEMCPY_CHK
:
6989 case BUILT_IN_MEMPCPY_CHK
:
6990 case BUILT_IN_MEMMOVE_CHK
:
6991 case BUILT_IN_MEMSET_CHK
:
6992 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6997 case BUILT_IN_STRCPY_CHK
:
6998 case BUILT_IN_STPCPY_CHK
:
6999 case BUILT_IN_STRNCPY_CHK
:
7000 case BUILT_IN_STRCAT_CHK
:
7001 case BUILT_IN_STRNCAT_CHK
:
7002 case BUILT_IN_SNPRINTF_CHK
:
7003 case BUILT_IN_VSNPRINTF_CHK
:
7004 maybe_emit_chk_warning (exp
, fcode
);
7007 case BUILT_IN_SPRINTF_CHK
:
7008 case BUILT_IN_VSPRINTF_CHK
:
7009 maybe_emit_sprintf_chk_warning (exp
, fcode
);
7013 maybe_emit_free_warning (exp
);
7016 default: /* just do library call, if unknown builtin */
7020 /* The switch statement above can drop through to cause the function
7021 to be called normally. */
7022 return expand_call (exp
, target
, ignore
);
7025 /* Determine whether a tree node represents a call to a built-in
7026 function. If the tree T is a call to a built-in function with
7027 the right number of arguments of the appropriate types, return
7028 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7029 Otherwise the return value is END_BUILTINS. */
7031 enum built_in_function
7032 builtin_mathfn_code (const_tree t
)
7034 const_tree fndecl
, arg
, parmlist
;
7035 const_tree argtype
, parmtype
;
7036 const_call_expr_arg_iterator iter
;
7038 if (TREE_CODE (t
) != CALL_EXPR
7039 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
7040 return END_BUILTINS
;
7042 fndecl
= get_callee_fndecl (t
);
7043 if (fndecl
== NULL_TREE
7044 || TREE_CODE (fndecl
) != FUNCTION_DECL
7045 || ! DECL_BUILT_IN (fndecl
)
7046 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
7047 return END_BUILTINS
;
7049 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
7050 init_const_call_expr_arg_iterator (t
, &iter
);
7051 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
7053 /* If a function doesn't take a variable number of arguments,
7054 the last element in the list will have type `void'. */
7055 parmtype
= TREE_VALUE (parmlist
);
7056 if (VOID_TYPE_P (parmtype
))
7058 if (more_const_call_expr_args_p (&iter
))
7059 return END_BUILTINS
;
7060 return DECL_FUNCTION_CODE (fndecl
);
7063 if (! more_const_call_expr_args_p (&iter
))
7064 return END_BUILTINS
;
7066 arg
= next_const_call_expr_arg (&iter
);
7067 argtype
= TREE_TYPE (arg
);
7069 if (SCALAR_FLOAT_TYPE_P (parmtype
))
7071 if (! SCALAR_FLOAT_TYPE_P (argtype
))
7072 return END_BUILTINS
;
7074 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
7076 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
7077 return END_BUILTINS
;
7079 else if (POINTER_TYPE_P (parmtype
))
7081 if (! POINTER_TYPE_P (argtype
))
7082 return END_BUILTINS
;
7084 else if (INTEGRAL_TYPE_P (parmtype
))
7086 if (! INTEGRAL_TYPE_P (argtype
))
7087 return END_BUILTINS
;
7090 return END_BUILTINS
;
7093 /* Variable-length argument list. */
7094 return DECL_FUNCTION_CODE (fndecl
);
7097 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7098 evaluate to a constant. */
7101 fold_builtin_constant_p (tree arg
)
7103 /* We return 1 for a numeric type that's known to be a constant
7104 value at compile-time or for an aggregate type that's a
7105 literal constant. */
7108 /* If we know this is a constant, emit the constant of one. */
7109 if (CONSTANT_CLASS_P (arg
)
7110 || (TREE_CODE (arg
) == CONSTRUCTOR
7111 && TREE_CONSTANT (arg
)))
7112 return integer_one_node
;
7113 if (TREE_CODE (arg
) == ADDR_EXPR
)
7115 tree op
= TREE_OPERAND (arg
, 0);
7116 if (TREE_CODE (op
) == STRING_CST
7117 || (TREE_CODE (op
) == ARRAY_REF
7118 && integer_zerop (TREE_OPERAND (op
, 1))
7119 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
7120 return integer_one_node
;
7123 /* If this expression has side effects, show we don't know it to be a
7124 constant. Likewise if it's a pointer or aggregate type since in
7125 those case we only want literals, since those are only optimized
7126 when generating RTL, not later.
7127 And finally, if we are compiling an initializer, not code, we
7128 need to return a definite result now; there's not going to be any
7129 more optimization done. */
7130 if (TREE_SIDE_EFFECTS (arg
)
7131 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
7132 || POINTER_TYPE_P (TREE_TYPE (arg
))
7134 || folding_initializer
)
7135 return integer_zero_node
;
7140 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7141 return it as a truthvalue. */
7144 build_builtin_expect_predicate (tree pred
, tree expected
)
7146 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
7148 fn
= built_in_decls
[BUILT_IN_EXPECT
];
7149 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
7150 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
7151 pred_type
= TREE_VALUE (arg_types
);
7152 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
7154 pred
= fold_convert (pred_type
, pred
);
7155 expected
= fold_convert (expected_type
, expected
);
7156 call_expr
= build_call_expr (fn
, 2, pred
, expected
);
7158 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
7159 build_int_cst (ret_type
, 0));
7162 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7163 NULL_TREE if no simplification is possible. */
7166 fold_builtin_expect (tree arg0
, tree arg1
)
7169 enum tree_code code
;
7171 /* If this is a builtin_expect within a builtin_expect keep the
7172 inner one. See through a comparison against a constant. It
7173 might have been added to create a thruthvalue. */
7175 if (COMPARISON_CLASS_P (inner
)
7176 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
7177 inner
= TREE_OPERAND (inner
, 0);
7179 if (TREE_CODE (inner
) == CALL_EXPR
7180 && (fndecl
= get_callee_fndecl (inner
))
7181 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7182 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
7185 /* Distribute the expected value over short-circuiting operators.
7186 See through the cast from truthvalue_type_node to long. */
7188 while (TREE_CODE (inner
) == NOP_EXPR
7189 && INTEGRAL_TYPE_P (TREE_TYPE (inner
))
7190 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner
, 0))))
7191 inner
= TREE_OPERAND (inner
, 0);
7193 code
= TREE_CODE (inner
);
7194 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
7196 tree op0
= TREE_OPERAND (inner
, 0);
7197 tree op1
= TREE_OPERAND (inner
, 1);
7199 op0
= build_builtin_expect_predicate (op0
, arg1
);
7200 op1
= build_builtin_expect_predicate (op1
, arg1
);
7201 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
7203 return fold_convert (TREE_TYPE (arg0
), inner
);
7206 /* If the argument isn't invariant then there's nothing else we can do. */
7207 if (!TREE_CONSTANT (arg0
))
7210 /* If we expect that a comparison against the argument will fold to
7211 a constant return the constant. In practice, this means a true
7212 constant or the address of a non-weak symbol. */
7215 if (TREE_CODE (inner
) == ADDR_EXPR
)
7219 inner
= TREE_OPERAND (inner
, 0);
7221 while (TREE_CODE (inner
) == COMPONENT_REF
7222 || TREE_CODE (inner
) == ARRAY_REF
);
7223 if (DECL_P (inner
) && DECL_WEAK (inner
))
7227 /* Otherwise, ARG0 already has the proper type for the return value. */
7231 /* Fold a call to __builtin_classify_type with argument ARG. */
7234 fold_builtin_classify_type (tree arg
)
7237 return build_int_cst (NULL_TREE
, no_type_class
);
7239 return build_int_cst (NULL_TREE
, type_to_class (TREE_TYPE (arg
)));
7242 /* Fold a call to __builtin_strlen with argument ARG. */
7245 fold_builtin_strlen (tree arg
)
7247 if (!validate_arg (arg
, POINTER_TYPE
))
7251 tree len
= c_strlen (arg
, 0);
7255 /* Convert from the internal "sizetype" type to "size_t". */
7257 len
= fold_convert (size_type_node
, len
);
7265 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7268 fold_builtin_inf (tree type
, int warn
)
7270 REAL_VALUE_TYPE real
;
7272 /* __builtin_inff is intended to be usable to define INFINITY on all
7273 targets. If an infinity is not available, INFINITY expands "to a
7274 positive constant of type float that overflows at translation
7275 time", footnote "In this case, using INFINITY will violate the
7276 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7277 Thus we pedwarn to ensure this constraint violation is
7279 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
7280 pedwarn (0, "target format does not support infinity");
7283 return build_real (type
, real
);
7286 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7289 fold_builtin_nan (tree arg
, tree type
, int quiet
)
7291 REAL_VALUE_TYPE real
;
7294 if (!validate_arg (arg
, POINTER_TYPE
))
7296 str
= c_getstr (arg
);
7300 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
7303 return build_real (type
, real
);
7306 /* Return true if the floating point expression T has an integer value.
7307 We also allow +Inf, -Inf and NaN to be considered integer values. */
7310 integer_valued_real_p (tree t
)
7312 switch (TREE_CODE (t
))
7319 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7324 return integer_valued_real_p (TREE_OPERAND (t
, 1));
7331 return integer_valued_real_p (TREE_OPERAND (t
, 0))
7332 && integer_valued_real_p (TREE_OPERAND (t
, 1));
7335 return integer_valued_real_p (TREE_OPERAND (t
, 1))
7336 && integer_valued_real_p (TREE_OPERAND (t
, 2));
7339 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
7343 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
7344 if (TREE_CODE (type
) == INTEGER_TYPE
)
7346 if (TREE_CODE (type
) == REAL_TYPE
)
7347 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7352 switch (builtin_mathfn_code (t
))
7354 CASE_FLT_FN (BUILT_IN_CEIL
):
7355 CASE_FLT_FN (BUILT_IN_FLOOR
):
7356 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
7357 CASE_FLT_FN (BUILT_IN_RINT
):
7358 CASE_FLT_FN (BUILT_IN_ROUND
):
7359 CASE_FLT_FN (BUILT_IN_TRUNC
):
7362 CASE_FLT_FN (BUILT_IN_FMIN
):
7363 CASE_FLT_FN (BUILT_IN_FMAX
):
7364 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
7365 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
7378 /* FNDECL is assumed to be a builtin where truncation can be propagated
7379 across (for instance floor((double)f) == (double)floorf (f).
7380 Do the transformation for a call with argument ARG. */
7383 fold_trunc_transparent_mathfn (tree fndecl
, tree arg
)
7385 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7387 if (!validate_arg (arg
, REAL_TYPE
))
7390 /* Integer rounding functions are idempotent. */
7391 if (fcode
== builtin_mathfn_code (arg
))
7394 /* If argument is already integer valued, and we don't need to worry
7395 about setting errno, there's no need to perform rounding. */
7396 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7401 tree arg0
= strip_float_extensions (arg
);
7402 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
7403 tree newtype
= TREE_TYPE (arg0
);
7406 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7407 && (decl
= mathfn_built_in (newtype
, fcode
)))
7408 return fold_convert (ftype
,
7409 build_call_expr (decl
, 1,
7410 fold_convert (newtype
, arg0
)));
7415 /* FNDECL is assumed to be builtin which can narrow the FP type of
7416 the argument, for instance lround((double)f) -> lroundf (f).
7417 Do the transformation for a call with argument ARG. */
7420 fold_fixed_mathfn (tree fndecl
, tree arg
)
7422 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7424 if (!validate_arg (arg
, REAL_TYPE
))
7427 /* If argument is already integer valued, and we don't need to worry
7428 about setting errno, there's no need to perform rounding. */
7429 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7430 return fold_build1 (FIX_TRUNC_EXPR
, TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7434 tree ftype
= TREE_TYPE (arg
);
7435 tree arg0
= strip_float_extensions (arg
);
7436 tree newtype
= TREE_TYPE (arg0
);
7439 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7440 && (decl
= mathfn_built_in (newtype
, fcode
)))
7441 return build_call_expr (decl
, 1, fold_convert (newtype
, arg0
));
7444 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7445 sizeof (long long) == sizeof (long). */
7446 if (TYPE_PRECISION (long_long_integer_type_node
)
7447 == TYPE_PRECISION (long_integer_type_node
))
7449 tree newfn
= NULL_TREE
;
7452 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7453 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7456 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7457 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7460 CASE_FLT_FN (BUILT_IN_LLROUND
):
7461 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7464 CASE_FLT_FN (BUILT_IN_LLRINT
):
7465 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7474 tree newcall
= build_call_expr(newfn
, 1, arg
);
7475 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7482 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7483 return type. Return NULL_TREE if no simplification can be made. */
7486 fold_builtin_cabs (tree arg
, tree type
, tree fndecl
)
7490 if (TREE_CODE (TREE_TYPE (arg
)) != COMPLEX_TYPE
7491 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7494 /* Calculate the result when the argument is a constant. */
7495 if (TREE_CODE (arg
) == COMPLEX_CST
7496 && (res
= do_mpfr_arg2 (TREE_REALPART (arg
), TREE_IMAGPART (arg
),
7500 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7502 tree real
= TREE_OPERAND (arg
, 0);
7503 tree imag
= TREE_OPERAND (arg
, 1);
7505 /* If either part is zero, cabs is fabs of the other. */
7506 if (real_zerop (real
))
7507 return fold_build1 (ABS_EXPR
, type
, imag
);
7508 if (real_zerop (imag
))
7509 return fold_build1 (ABS_EXPR
, type
, real
);
7511 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7512 if (flag_unsafe_math_optimizations
7513 && operand_equal_p (real
, imag
, OEP_PURE_SAME
))
7515 const REAL_VALUE_TYPE sqrt2_trunc
7516 = real_value_truncate (TYPE_MODE (type
),
7517 *get_real_const (rv_sqrt2
));
7519 return fold_build2 (MULT_EXPR
, type
,
7520 fold_build1 (ABS_EXPR
, type
, real
),
7521 build_real (type
, sqrt2_trunc
));
7525 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7526 if (TREE_CODE (arg
) == NEGATE_EXPR
7527 || TREE_CODE (arg
) == CONJ_EXPR
)
7528 return build_call_expr (fndecl
, 1, TREE_OPERAND (arg
, 0));
7530 /* Don't do this when optimizing for size. */
7531 if (flag_unsafe_math_optimizations
7532 && optimize
&& !optimize_size
)
7534 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
7536 if (sqrtfn
!= NULL_TREE
)
7538 tree rpart
, ipart
, result
;
7540 arg
= builtin_save_expr (arg
);
7542 rpart
= fold_build1 (REALPART_EXPR
, type
, arg
);
7543 ipart
= fold_build1 (IMAGPART_EXPR
, type
, arg
);
7545 rpart
= builtin_save_expr (rpart
);
7546 ipart
= builtin_save_expr (ipart
);
7548 result
= fold_build2 (PLUS_EXPR
, type
,
7549 fold_build2 (MULT_EXPR
, type
,
7551 fold_build2 (MULT_EXPR
, type
,
7554 return build_call_expr (sqrtfn
, 1, result
);
7561 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7562 Return NULL_TREE if no simplification can be made. */
7565 fold_builtin_sqrt (tree arg
, tree type
)
7568 enum built_in_function fcode
;
7571 if (!validate_arg (arg
, REAL_TYPE
))
7574 /* Calculate the result when the argument is a constant. */
7575 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_sqrt
, &dconst0
, NULL
, true)))
7578 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7579 fcode
= builtin_mathfn_code (arg
);
7580 if (flag_unsafe_math_optimizations
&& BUILTIN_EXPONENT_P (fcode
))
7582 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7583 arg
= fold_build2 (MULT_EXPR
, type
,
7584 CALL_EXPR_ARG (arg
, 0),
7585 build_real (type
, dconsthalf
));
7586 return build_call_expr (expfn
, 1, arg
);
7589 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7590 if (flag_unsafe_math_optimizations
&& BUILTIN_ROOT_P (fcode
))
7592 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7596 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7598 /* The inner root was either sqrt or cbrt. */
7599 REAL_VALUE_TYPE dconstroot
=
7600 BUILTIN_SQRT_P (fcode
) ? dconsthalf
: *get_real_const (rv_third
);
7602 /* Adjust for the outer root. */
7603 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7604 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7605 tree_root
= build_real (type
, dconstroot
);
7606 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7610 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7611 if (flag_unsafe_math_optimizations
7612 && (fcode
== BUILT_IN_POW
7613 || fcode
== BUILT_IN_POWF
7614 || fcode
== BUILT_IN_POWL
))
7616 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7617 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7618 tree arg1
= CALL_EXPR_ARG (arg
, 1);
7620 if (!tree_expr_nonnegative_p (arg0
))
7621 arg0
= build1 (ABS_EXPR
, type
, arg0
);
7622 narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
7623 build_real (type
, dconsthalf
));
7624 return build_call_expr (powfn
, 2, arg0
, narg1
);
7630 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7631 Return NULL_TREE if no simplification can be made. */
7634 fold_builtin_cbrt (tree arg
, tree type
)
7636 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7639 if (!validate_arg (arg
, REAL_TYPE
))
7642 /* Calculate the result when the argument is a constant. */
7643 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cbrt
, NULL
, NULL
, 0)))
7646 if (flag_unsafe_math_optimizations
)
7648 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7649 if (BUILTIN_EXPONENT_P (fcode
))
7651 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7652 const REAL_VALUE_TYPE third_trunc
=
7653 real_value_truncate (TYPE_MODE (type
), *get_real_const (rv_third
));
7654 arg
= fold_build2 (MULT_EXPR
, type
,
7655 CALL_EXPR_ARG (arg
, 0),
7656 build_real (type
, third_trunc
));
7657 return build_call_expr (expfn
, 1, arg
);
7660 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7661 if (BUILTIN_SQRT_P (fcode
))
7663 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7667 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7669 REAL_VALUE_TYPE dconstroot
= *get_real_const (rv_third
);
7671 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7672 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7673 tree_root
= build_real (type
, dconstroot
);
7674 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7678 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7679 if (BUILTIN_CBRT_P (fcode
))
7681 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7682 if (tree_expr_nonnegative_p (arg0
))
7684 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7689 REAL_VALUE_TYPE dconstroot
;
7691 real_arithmetic (&dconstroot
, MULT_EXPR
,
7692 get_real_const (rv_third
),
7693 get_real_const (rv_third
));
7694 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7695 tree_root
= build_real (type
, dconstroot
);
7696 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7701 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7702 if (fcode
== BUILT_IN_POW
7703 || fcode
== BUILT_IN_POWF
7704 || fcode
== BUILT_IN_POWL
)
7706 tree arg00
= CALL_EXPR_ARG (arg
, 0);
7707 tree arg01
= CALL_EXPR_ARG (arg
, 1);
7708 if (tree_expr_nonnegative_p (arg00
))
7710 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7711 const REAL_VALUE_TYPE dconstroot
7712 = real_value_truncate (TYPE_MODE (type
),
7713 *get_real_const (rv_third
));
7714 tree narg01
= fold_build2 (MULT_EXPR
, type
, arg01
,
7715 build_real (type
, dconstroot
));
7716 return build_call_expr (powfn
, 2, arg00
, narg01
);
7723 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7724 TYPE is the type of the return value. Return NULL_TREE if no
7725 simplification can be made. */
7728 fold_builtin_cos (tree arg
, tree type
, tree fndecl
)
7732 if (!validate_arg (arg
, REAL_TYPE
))
7735 /* Calculate the result when the argument is a constant. */
7736 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cos
, NULL
, NULL
, 0)))
7739 /* Optimize cos(-x) into cos (x). */
7740 if ((narg
= fold_strip_sign_ops (arg
)))
7741 return build_call_expr (fndecl
, 1, narg
);
7746 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7747 Return NULL_TREE if no simplification can be made. */
7750 fold_builtin_cosh (tree arg
, tree type
, tree fndecl
)
7752 if (validate_arg (arg
, REAL_TYPE
))
7756 /* Calculate the result when the argument is a constant. */
7757 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cosh
, NULL
, NULL
, 0)))
7760 /* Optimize cosh(-x) into cosh (x). */
7761 if ((narg
= fold_strip_sign_ops (arg
)))
7762 return build_call_expr (fndecl
, 1, narg
);
7768 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7769 Return NULL_TREE if no simplification can be made. */
7772 fold_builtin_tan (tree arg
, tree type
)
7774 enum built_in_function fcode
;
7777 if (!validate_arg (arg
, REAL_TYPE
))
7780 /* Calculate the result when the argument is a constant. */
7781 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_tan
, NULL
, NULL
, 0)))
7784 /* Optimize tan(atan(x)) = x. */
7785 fcode
= builtin_mathfn_code (arg
);
7786 if (flag_unsafe_math_optimizations
7787 && (fcode
== BUILT_IN_ATAN
7788 || fcode
== BUILT_IN_ATANF
7789 || fcode
== BUILT_IN_ATANL
))
7790 return CALL_EXPR_ARG (arg
, 0);
7795 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7796 NULL_TREE if no simplification can be made. */
7799 fold_builtin_sincos (tree arg0
, tree arg1
, tree arg2
)
7804 if (!validate_arg (arg0
, REAL_TYPE
)
7805 || !validate_arg (arg1
, POINTER_TYPE
)
7806 || !validate_arg (arg2
, POINTER_TYPE
))
7809 type
= TREE_TYPE (arg0
);
7811 /* Calculate the result when the argument is a constant. */
7812 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7815 /* Canonicalize sincos to cexpi. */
7816 if (!TARGET_C99_FUNCTIONS
)
7818 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7822 call
= build_call_expr (fn
, 1, arg0
);
7823 call
= builtin_save_expr (call
);
7825 return build2 (COMPOUND_EXPR
, type
,
7826 build2 (MODIFY_EXPR
, void_type_node
,
7827 build_fold_indirect_ref (arg1
),
7828 build1 (IMAGPART_EXPR
, type
, call
)),
7829 build2 (MODIFY_EXPR
, void_type_node
,
7830 build_fold_indirect_ref (arg2
),
7831 build1 (REALPART_EXPR
, type
, call
)));
7834 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7835 NULL_TREE if no simplification can be made. */
7838 fold_builtin_cexp (tree arg0
, tree type
)
7841 tree realp
, imagp
, ifn
;
7843 if (!validate_arg (arg0
, COMPLEX_TYPE
))
7846 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7848 /* In case we can figure out the real part of arg0 and it is constant zero
7850 if (!TARGET_C99_FUNCTIONS
)
7852 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7856 if ((realp
= fold_unary (REALPART_EXPR
, rtype
, arg0
))
7857 && real_zerop (realp
))
7859 tree narg
= fold_build1 (IMAGPART_EXPR
, rtype
, arg0
);
7860 return build_call_expr (ifn
, 1, narg
);
7863 /* In case we can easily decompose real and imaginary parts split cexp
7864 to exp (r) * cexpi (i). */
7865 if (flag_unsafe_math_optimizations
7868 tree rfn
, rcall
, icall
;
7870 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7874 imagp
= fold_unary (IMAGPART_EXPR
, rtype
, arg0
);
7878 icall
= build_call_expr (ifn
, 1, imagp
);
7879 icall
= builtin_save_expr (icall
);
7880 rcall
= build_call_expr (rfn
, 1, realp
);
7881 rcall
= builtin_save_expr (rcall
);
7882 return fold_build2 (COMPLEX_EXPR
, type
,
7883 fold_build2 (MULT_EXPR
, rtype
,
7885 fold_build1 (REALPART_EXPR
, rtype
, icall
)),
7886 fold_build2 (MULT_EXPR
, rtype
,
7888 fold_build1 (IMAGPART_EXPR
, rtype
, icall
)));
7894 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7895 Return NULL_TREE if no simplification can be made. */
7898 fold_builtin_trunc (tree fndecl
, tree arg
)
7900 if (!validate_arg (arg
, REAL_TYPE
))
7903 /* Optimize trunc of constant value. */
7904 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7906 REAL_VALUE_TYPE r
, x
;
7907 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7909 x
= TREE_REAL_CST (arg
);
7910 real_trunc (&r
, TYPE_MODE (type
), &x
);
7911 return build_real (type
, r
);
7914 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7917 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7918 Return NULL_TREE if no simplification can be made. */
7921 fold_builtin_floor (tree fndecl
, tree arg
)
7923 if (!validate_arg (arg
, REAL_TYPE
))
7926 /* Optimize floor of constant value. */
7927 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7931 x
= TREE_REAL_CST (arg
);
7932 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7934 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7937 real_floor (&r
, TYPE_MODE (type
), &x
);
7938 return build_real (type
, r
);
7942 /* Fold floor (x) where x is nonnegative to trunc (x). */
7943 if (tree_expr_nonnegative_p (arg
))
7945 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
7947 return build_call_expr (truncfn
, 1, arg
);
7950 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7953 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7954 Return NULL_TREE if no simplification can be made. */
7957 fold_builtin_ceil (tree fndecl
, tree arg
)
7959 if (!validate_arg (arg
, REAL_TYPE
))
7962 /* Optimize ceil of constant value. */
7963 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7967 x
= TREE_REAL_CST (arg
);
7968 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7970 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7973 real_ceil (&r
, TYPE_MODE (type
), &x
);
7974 return build_real (type
, r
);
7978 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7981 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7982 Return NULL_TREE if no simplification can be made. */
7985 fold_builtin_round (tree fndecl
, tree arg
)
7987 if (!validate_arg (arg
, REAL_TYPE
))
7990 /* Optimize round of constant value. */
7991 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7995 x
= TREE_REAL_CST (arg
);
7996 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7998 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8001 real_round (&r
, TYPE_MODE (type
), &x
);
8002 return build_real (type
, r
);
8006 return fold_trunc_transparent_mathfn (fndecl
, arg
);
8009 /* Fold function call to builtin lround, lroundf or lroundl (or the
8010 corresponding long long versions) and other rounding functions. ARG
8011 is the argument to the call. Return NULL_TREE if no simplification
8015 fold_builtin_int_roundingfn (tree fndecl
, tree arg
)
8017 if (!validate_arg (arg
, REAL_TYPE
))
8020 /* Optimize lround of constant value. */
8021 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
8023 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
8025 if (real_isfinite (&x
))
8027 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
8028 tree ftype
= TREE_TYPE (arg
);
8029 unsigned HOST_WIDE_INT lo2
;
8030 HOST_WIDE_INT hi
, lo
;
8033 switch (DECL_FUNCTION_CODE (fndecl
))
8035 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8036 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8037 real_floor (&r
, TYPE_MODE (ftype
), &x
);
8040 CASE_FLT_FN (BUILT_IN_LCEIL
):
8041 CASE_FLT_FN (BUILT_IN_LLCEIL
):
8042 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
8045 CASE_FLT_FN (BUILT_IN_LROUND
):
8046 CASE_FLT_FN (BUILT_IN_LLROUND
):
8047 real_round (&r
, TYPE_MODE (ftype
), &x
);
8054 REAL_VALUE_TO_INT (&lo
, &hi
, r
);
8055 if (!fit_double_type (lo
, hi
, &lo2
, &hi
, itype
))
8056 return build_int_cst_wide (itype
, lo2
, hi
);
8060 switch (DECL_FUNCTION_CODE (fndecl
))
8062 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8063 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8064 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8065 if (tree_expr_nonnegative_p (arg
))
8066 return fold_build1 (FIX_TRUNC_EXPR
, TREE_TYPE (TREE_TYPE (fndecl
)),
8072 return fold_fixed_mathfn (fndecl
, arg
);
8075 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8076 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8077 the argument to the call. Return NULL_TREE if no simplification can
8081 fold_builtin_bitop (tree fndecl
, tree arg
)
8083 if (!validate_arg (arg
, INTEGER_TYPE
))
8086 /* Optimize for constant argument. */
8087 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8089 HOST_WIDE_INT hi
, width
, result
;
8090 unsigned HOST_WIDE_INT lo
;
8093 type
= TREE_TYPE (arg
);
8094 width
= TYPE_PRECISION (type
);
8095 lo
= TREE_INT_CST_LOW (arg
);
8097 /* Clear all the bits that are beyond the type's precision. */
8098 if (width
> HOST_BITS_PER_WIDE_INT
)
8100 hi
= TREE_INT_CST_HIGH (arg
);
8101 if (width
< 2 * HOST_BITS_PER_WIDE_INT
)
8102 hi
&= ~((HOST_WIDE_INT
) (-1) >> (width
- HOST_BITS_PER_WIDE_INT
));
8107 if (width
< HOST_BITS_PER_WIDE_INT
)
8108 lo
&= ~((unsigned HOST_WIDE_INT
) (-1) << width
);
8111 switch (DECL_FUNCTION_CODE (fndecl
))
8113 CASE_INT_FN (BUILT_IN_FFS
):
8115 result
= exact_log2 (lo
& -lo
) + 1;
8117 result
= HOST_BITS_PER_WIDE_INT
+ exact_log2 (hi
& -hi
) + 1;
8122 CASE_INT_FN (BUILT_IN_CLZ
):
8124 result
= width
- floor_log2 (hi
) - 1 - HOST_BITS_PER_WIDE_INT
;
8126 result
= width
- floor_log2 (lo
) - 1;
8127 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8131 CASE_INT_FN (BUILT_IN_CTZ
):
8133 result
= exact_log2 (lo
& -lo
);
8135 result
= HOST_BITS_PER_WIDE_INT
+ exact_log2 (hi
& -hi
);
8136 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8140 CASE_INT_FN (BUILT_IN_POPCOUNT
):
8143 result
++, lo
&= lo
- 1;
8145 result
++, hi
&= hi
- 1;
8148 CASE_INT_FN (BUILT_IN_PARITY
):
8151 result
++, lo
&= lo
- 1;
8153 result
++, hi
&= hi
- 1;
8161 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
8167 /* Fold function call to builtin_bswap and the long and long long
8168 variants. Return NULL_TREE if no simplification can be made. */
8170 fold_builtin_bswap (tree fndecl
, tree arg
)
8172 if (! validate_arg (arg
, INTEGER_TYPE
))
8175 /* Optimize constant value. */
8176 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8178 HOST_WIDE_INT hi
, width
, r_hi
= 0;
8179 unsigned HOST_WIDE_INT lo
, r_lo
= 0;
8182 type
= TREE_TYPE (arg
);
8183 width
= TYPE_PRECISION (type
);
8184 lo
= TREE_INT_CST_LOW (arg
);
8185 hi
= TREE_INT_CST_HIGH (arg
);
8187 switch (DECL_FUNCTION_CODE (fndecl
))
8189 case BUILT_IN_BSWAP32
:
8190 case BUILT_IN_BSWAP64
:
8194 for (s
= 0; s
< width
; s
+= 8)
8196 int d
= width
- s
- 8;
8197 unsigned HOST_WIDE_INT byte
;
8199 if (s
< HOST_BITS_PER_WIDE_INT
)
8200 byte
= (lo
>> s
) & 0xff;
8202 byte
= (hi
>> (s
- HOST_BITS_PER_WIDE_INT
)) & 0xff;
8204 if (d
< HOST_BITS_PER_WIDE_INT
)
8207 r_hi
|= byte
<< (d
- HOST_BITS_PER_WIDE_INT
);
8217 if (width
< HOST_BITS_PER_WIDE_INT
)
8218 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
);
8220 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
, r_hi
);
8226 /* Return true if EXPR is the real constant contained in VALUE. */
8229 real_dconstp (tree expr
, const REAL_VALUE_TYPE
*value
)
8233 return ((TREE_CODE (expr
) == REAL_CST
8234 && !TREE_OVERFLOW (expr
)
8235 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), *value
))
8236 || (TREE_CODE (expr
) == COMPLEX_CST
8237 && real_dconstp (TREE_REALPART (expr
), value
)
8238 && real_zerop (TREE_IMAGPART (expr
))));
8241 /* A subroutine of fold_builtin to fold the various logarithmic
8242 functions. Return NULL_TREE if no simplification can me made.
8243 FUNC is the corresponding MPFR logarithm function. */
8246 fold_builtin_logarithm (tree fndecl
, tree arg
,
8247 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8249 if (validate_arg (arg
, REAL_TYPE
))
8251 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8253 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8255 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8256 instead we'll look for 'e' truncated to MODE. So only do
8257 this if flag_unsafe_math_optimizations is set. */
8258 if (flag_unsafe_math_optimizations
&& func
== mpfr_log
)
8260 const REAL_VALUE_TYPE e_truncated
=
8261 real_value_truncate (TYPE_MODE (type
), *get_real_const (rv_e
));
8262 if (real_dconstp (arg
, &e_truncated
))
8263 return build_real (type
, dconst1
);
8266 /* Calculate the result when the argument is a constant. */
8267 if ((res
= do_mpfr_arg1 (arg
, type
, func
, &dconst0
, NULL
, false)))
8270 /* Special case, optimize logN(expN(x)) = x. */
8271 if (flag_unsafe_math_optimizations
8272 && ((func
== mpfr_log
8273 && (fcode
== BUILT_IN_EXP
8274 || fcode
== BUILT_IN_EXPF
8275 || fcode
== BUILT_IN_EXPL
))
8276 || (func
== mpfr_log2
8277 && (fcode
== BUILT_IN_EXP2
8278 || fcode
== BUILT_IN_EXP2F
8279 || fcode
== BUILT_IN_EXP2L
))
8280 || (func
== mpfr_log10
&& (BUILTIN_EXP10_P (fcode
)))))
8281 return fold_convert (type
, CALL_EXPR_ARG (arg
, 0));
8283 /* Optimize logN(func()) for various exponential functions. We
8284 want to determine the value "x" and the power "exponent" in
8285 order to transform logN(x**exponent) into exponent*logN(x). */
8286 if (flag_unsafe_math_optimizations
)
8288 tree exponent
= 0, x
= 0;
8292 CASE_FLT_FN (BUILT_IN_EXP
):
8293 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8294 x
= build_real (type
,
8295 real_value_truncate (TYPE_MODE (type
),
8296 *get_real_const (rv_e
)));
8297 exponent
= CALL_EXPR_ARG (arg
, 0);
8299 CASE_FLT_FN (BUILT_IN_EXP2
):
8300 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8301 x
= build_real (type
, dconst2
);
8302 exponent
= CALL_EXPR_ARG (arg
, 0);
8304 CASE_FLT_FN (BUILT_IN_EXP10
):
8305 CASE_FLT_FN (BUILT_IN_POW10
):
8306 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8308 REAL_VALUE_TYPE dconst10
;
8309 real_from_integer (&dconst10
, VOIDmode
, 10, 0, 0);
8310 x
= build_real (type
, dconst10
);
8312 exponent
= CALL_EXPR_ARG (arg
, 0);
8314 CASE_FLT_FN (BUILT_IN_SQRT
):
8315 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8316 x
= CALL_EXPR_ARG (arg
, 0);
8317 exponent
= build_real (type
, dconsthalf
);
8319 CASE_FLT_FN (BUILT_IN_CBRT
):
8320 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8321 x
= CALL_EXPR_ARG (arg
, 0);
8322 exponent
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8323 *get_real_const (rv_third
)));
8325 CASE_FLT_FN (BUILT_IN_POW
):
8326 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8327 x
= CALL_EXPR_ARG (arg
, 0);
8328 exponent
= CALL_EXPR_ARG (arg
, 1);
8334 /* Now perform the optimization. */
8337 tree logfn
= build_call_expr (fndecl
, 1, x
);
8338 return fold_build2 (MULT_EXPR
, type
, exponent
, logfn
);
8346 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8347 NULL_TREE if no simplification can be made. */
8350 fold_builtin_hypot (tree fndecl
, tree arg0
, tree arg1
, tree type
)
8352 tree res
, narg0
, narg1
;
8354 if (!validate_arg (arg0
, REAL_TYPE
)
8355 || !validate_arg (arg1
, REAL_TYPE
))
8358 /* Calculate the result when the argument is a constant. */
8359 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
)))
8362 /* If either argument to hypot has a negate or abs, strip that off.
8363 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8364 narg0
= fold_strip_sign_ops (arg0
);
8365 narg1
= fold_strip_sign_ops (arg1
);
8368 return build_call_expr (fndecl
, 2, narg0
? narg0
: arg0
,
8369 narg1
? narg1
: arg1
);
8372 /* If either argument is zero, hypot is fabs of the other. */
8373 if (real_zerop (arg0
))
8374 return fold_build1 (ABS_EXPR
, type
, arg1
);
8375 else if (real_zerop (arg1
))
8376 return fold_build1 (ABS_EXPR
, type
, arg0
);
8378 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8379 if (flag_unsafe_math_optimizations
8380 && operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8382 const REAL_VALUE_TYPE sqrt2_trunc
8383 = real_value_truncate (TYPE_MODE (type
), *get_real_const (rv_sqrt2
));
8384 return fold_build2 (MULT_EXPR
, type
,
8385 fold_build1 (ABS_EXPR
, type
, arg0
),
8386 build_real (type
, sqrt2_trunc
));
8393 /* Fold a builtin function call to pow, powf, or powl. Return
8394 NULL_TREE if no simplification can be made. */
8396 fold_builtin_pow (tree fndecl
, tree arg0
, tree arg1
, tree type
)
8400 if (!validate_arg (arg0
, REAL_TYPE
)
8401 || !validate_arg (arg1
, REAL_TYPE
))
8404 /* Calculate the result when the argument is a constant. */
8405 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8408 /* Optimize pow(1.0,y) = 1.0. */
8409 if (real_onep (arg0
))
8410 return omit_one_operand (type
, build_real (type
, dconst1
), arg1
);
8412 if (TREE_CODE (arg1
) == REAL_CST
8413 && !TREE_OVERFLOW (arg1
))
8415 REAL_VALUE_TYPE cint
;
8419 c
= TREE_REAL_CST (arg1
);
8421 /* Optimize pow(x,0.0) = 1.0. */
8422 if (REAL_VALUES_EQUAL (c
, dconst0
))
8423 return omit_one_operand (type
, build_real (type
, dconst1
),
8426 /* Optimize pow(x,1.0) = x. */
8427 if (REAL_VALUES_EQUAL (c
, dconst1
))
8430 /* Optimize pow(x,-1.0) = 1.0/x. */
8431 if (REAL_VALUES_EQUAL (c
, dconstm1
))
8432 return fold_build2 (RDIV_EXPR
, type
,
8433 build_real (type
, dconst1
), arg0
);
8435 /* Optimize pow(x,0.5) = sqrt(x). */
8436 if (flag_unsafe_math_optimizations
8437 && REAL_VALUES_EQUAL (c
, dconsthalf
))
8439 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8441 if (sqrtfn
!= NULL_TREE
)
8442 return build_call_expr (sqrtfn
, 1, arg0
);
8445 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8446 if (flag_unsafe_math_optimizations
)
8448 const REAL_VALUE_TYPE dconstroot
8449 = real_value_truncate (TYPE_MODE (type
),
8450 *get_real_const (rv_third
));
8452 if (REAL_VALUES_EQUAL (c
, dconstroot
))
8454 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8455 if (cbrtfn
!= NULL_TREE
)
8456 return build_call_expr (cbrtfn
, 1, arg0
);
8460 /* Check for an integer exponent. */
8461 n
= real_to_integer (&c
);
8462 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
8463 if (real_identical (&c
, &cint
))
8465 /* Attempt to evaluate pow at compile-time. */
8466 if (TREE_CODE (arg0
) == REAL_CST
8467 && !TREE_OVERFLOW (arg0
))
8472 x
= TREE_REAL_CST (arg0
);
8473 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8474 if (flag_unsafe_math_optimizations
|| !inexact
)
8475 return build_real (type
, x
);
8478 /* Strip sign ops from even integer powers. */
8479 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8481 tree narg0
= fold_strip_sign_ops (arg0
);
8483 return build_call_expr (fndecl
, 2, narg0
, arg1
);
8488 if (flag_unsafe_math_optimizations
)
8490 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8492 /* Optimize pow(expN(x),y) = expN(x*y). */
8493 if (BUILTIN_EXPONENT_P (fcode
))
8495 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8496 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8497 arg
= fold_build2 (MULT_EXPR
, type
, arg
, arg1
);
8498 return build_call_expr (expfn
, 1, arg
);
8501 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8502 if (BUILTIN_SQRT_P (fcode
))
8504 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8505 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
8506 build_real (type
, dconsthalf
));
8507 return build_call_expr (fndecl
, 2, narg0
, narg1
);
8510 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8511 if (BUILTIN_CBRT_P (fcode
))
8513 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8514 if (tree_expr_nonnegative_p (arg
))
8516 const REAL_VALUE_TYPE dconstroot
8517 = real_value_truncate (TYPE_MODE (type
),
8518 *get_real_const (rv_third
));
8519 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
8520 build_real (type
, dconstroot
));
8521 return build_call_expr (fndecl
, 2, arg
, narg1
);
8525 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8526 if (fcode
== BUILT_IN_POW
8527 || fcode
== BUILT_IN_POWF
8528 || fcode
== BUILT_IN_POWL
)
8530 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8531 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8532 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg01
, arg1
);
8533 return build_call_expr (fndecl
, 2, arg00
, narg1
);
8540 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8541 Return NULL_TREE if no simplification can be made. */
8543 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED
,
8544 tree arg0
, tree arg1
, tree type
)
8546 if (!validate_arg (arg0
, REAL_TYPE
)
8547 || !validate_arg (arg1
, INTEGER_TYPE
))
8550 /* Optimize pow(1.0,y) = 1.0. */
8551 if (real_onep (arg0
))
8552 return omit_one_operand (type
, build_real (type
, dconst1
), arg1
);
8554 if (host_integerp (arg1
, 0))
8556 HOST_WIDE_INT c
= TREE_INT_CST_LOW (arg1
);
8558 /* Evaluate powi at compile-time. */
8559 if (TREE_CODE (arg0
) == REAL_CST
8560 && !TREE_OVERFLOW (arg0
))
8563 x
= TREE_REAL_CST (arg0
);
8564 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8565 return build_real (type
, x
);
8568 /* Optimize pow(x,0) = 1.0. */
8570 return omit_one_operand (type
, build_real (type
, dconst1
),
8573 /* Optimize pow(x,1) = x. */
8577 /* Optimize pow(x,-1) = 1.0/x. */
8579 return fold_build2 (RDIV_EXPR
, type
,
8580 build_real (type
, dconst1
), arg0
);
8586 /* A subroutine of fold_builtin to fold the various exponent
8587 functions. Return NULL_TREE if no simplification can be made.
8588 FUNC is the corresponding MPFR exponent function. */
8591 fold_builtin_exponent (tree fndecl
, tree arg
,
8592 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8594 if (validate_arg (arg
, REAL_TYPE
))
8596 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8599 /* Calculate the result when the argument is a constant. */
8600 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8603 /* Optimize expN(logN(x)) = x. */
8604 if (flag_unsafe_math_optimizations
)
8606 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8608 if ((func
== mpfr_exp
8609 && (fcode
== BUILT_IN_LOG
8610 || fcode
== BUILT_IN_LOGF
8611 || fcode
== BUILT_IN_LOGL
))
8612 || (func
== mpfr_exp2
8613 && (fcode
== BUILT_IN_LOG2
8614 || fcode
== BUILT_IN_LOG2F
8615 || fcode
== BUILT_IN_LOG2L
))
8616 || (func
== mpfr_exp10
8617 && (fcode
== BUILT_IN_LOG10
8618 || fcode
== BUILT_IN_LOG10F
8619 || fcode
== BUILT_IN_LOG10L
)))
8620 return fold_convert (type
, CALL_EXPR_ARG (arg
, 0));
8627 /* Return true if VAR is a VAR_DECL or a component thereof. */
8630 var_decl_component_p (tree var
)
8633 while (handled_component_p (inner
))
8634 inner
= TREE_OPERAND (inner
, 0);
8635 return SSA_VAR_P (inner
);
8638 /* Fold function call to builtin memset. Return
8639 NULL_TREE if no simplification can be made. */
8642 fold_builtin_memset (tree dest
, tree c
, tree len
, tree type
, bool ignore
)
8645 unsigned HOST_WIDE_INT length
, cval
;
8647 if (! validate_arg (dest
, POINTER_TYPE
)
8648 || ! validate_arg (c
, INTEGER_TYPE
)
8649 || ! validate_arg (len
, INTEGER_TYPE
))
8652 if (! host_integerp (len
, 1))
8655 /* If the LEN parameter is zero, return DEST. */
8656 if (integer_zerop (len
))
8657 return omit_one_operand (type
, dest
, c
);
8659 if (! host_integerp (c
, 1) || TREE_SIDE_EFFECTS (dest
))
8664 if (TREE_CODE (var
) != ADDR_EXPR
)
8667 var
= TREE_OPERAND (var
, 0);
8668 if (TREE_THIS_VOLATILE (var
))
8671 if (!INTEGRAL_TYPE_P (TREE_TYPE (var
))
8672 && !POINTER_TYPE_P (TREE_TYPE (var
)))
8675 if (! var_decl_component_p (var
))
8678 length
= tree_low_cst (len
, 1);
8679 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var
))) != length
8680 || get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
8684 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
8687 if (integer_zerop (c
))
8691 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
8694 cval
= tree_low_cst (c
, 1);
8698 cval
|= (cval
<< 31) << 1;
8701 ret
= build_int_cst_type (TREE_TYPE (var
), cval
);
8702 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, ret
);
8706 return omit_one_operand (type
, dest
, ret
);
8709 /* Fold function call to builtin memset. Return
8710 NULL_TREE if no simplification can be made. */
8713 fold_builtin_bzero (tree dest
, tree size
, bool ignore
)
8715 if (! validate_arg (dest
, POINTER_TYPE
)
8716 || ! validate_arg (size
, INTEGER_TYPE
))
8722 /* New argument list transforming bzero(ptr x, int y) to
8723 memset(ptr x, int 0, size_t y). This is done this way
8724 so that if it isn't expanded inline, we fallback to
8725 calling bzero instead of memset. */
8727 return fold_builtin_memset (dest
, integer_zero_node
,
8728 fold_convert (sizetype
, size
),
8729 void_type_node
, ignore
);
8732 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8733 NULL_TREE if no simplification can be made.
8734 If ENDP is 0, return DEST (like memcpy).
8735 If ENDP is 1, return DEST+LEN (like mempcpy).
8736 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8737 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8741 fold_builtin_memory_op (tree dest
, tree src
, tree len
, tree type
, bool ignore
, int endp
)
8743 tree destvar
, srcvar
, expr
;
8745 if (! validate_arg (dest
, POINTER_TYPE
)
8746 || ! validate_arg (src
, POINTER_TYPE
)
8747 || ! validate_arg (len
, INTEGER_TYPE
))
8750 /* If the LEN parameter is zero, return DEST. */
8751 if (integer_zerop (len
))
8752 return omit_one_operand (type
, dest
, src
);
8754 /* If SRC and DEST are the same (and not volatile), return
8755 DEST{,+LEN,+LEN-1}. */
8756 if (operand_equal_p (src
, dest
, 0))
8760 tree srctype
, desttype
;
8763 int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
8764 int dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
8766 /* Both DEST and SRC must be pointer types.
8767 ??? This is what old code did. Is the testing for pointer types
8770 If either SRC is readonly or length is 1, we can use memcpy. */
8771 if (dest_align
&& src_align
8772 && (readonly_data_expr (src
)
8773 || (host_integerp (len
, 1)
8774 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
>=
8775 tree_low_cst (len
, 1)))))
8777 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8780 return build_call_expr (fn
, 3, dest
, src
, len
);
8785 if (!host_integerp (len
, 0))
8788 This logic lose for arguments like (type *)malloc (sizeof (type)),
8789 since we strip the casts of up to VOID return value from malloc.
8790 Perhaps we ought to inherit type from non-VOID argument here? */
8793 srctype
= TREE_TYPE (TREE_TYPE (src
));
8794 desttype
= TREE_TYPE (TREE_TYPE (dest
));
8795 if (!srctype
|| !desttype
8796 || !TYPE_SIZE_UNIT (srctype
)
8797 || !TYPE_SIZE_UNIT (desttype
)
8798 || TREE_CODE (TYPE_SIZE_UNIT (srctype
)) != INTEGER_CST
8799 || TREE_CODE (TYPE_SIZE_UNIT (desttype
)) != INTEGER_CST
8800 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
)
8801 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8804 if (get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
)
8805 < (int) TYPE_ALIGN (desttype
)
8806 || (get_pointer_alignment (src
, BIGGEST_ALIGNMENT
)
8807 < (int) TYPE_ALIGN (srctype
)))
8811 dest
= builtin_save_expr (dest
);
8813 srcvar
= build_fold_indirect_ref (src
);
8814 if (TREE_THIS_VOLATILE (srcvar
))
8816 if (!tree_int_cst_equal (lang_hooks
.expr_size (srcvar
), len
))
8818 /* With memcpy, it is possible to bypass aliasing rules, so without
8819 this check i.e. execute/20060930-2.c would be misoptimized, because
8820 it use conflicting alias set to hold argument for the memcpy call.
8821 This check is probably unnecessary with -fno-strict-aliasing.
8822 Similarly for destvar. See also PR29286. */
8823 if (!var_decl_component_p (srcvar
)
8824 /* Accept: memcpy (*char_var, "test", 1); that simplify
8826 || is_gimple_min_invariant (srcvar
)
8827 || readonly_data_expr (src
))
8830 destvar
= build_fold_indirect_ref (dest
);
8831 if (TREE_THIS_VOLATILE (destvar
))
8833 if (!tree_int_cst_equal (lang_hooks
.expr_size (destvar
), len
))
8835 if (!var_decl_component_p (destvar
))
8838 if (srctype
== desttype
8839 || (gimple_in_ssa_p (cfun
)
8840 && useless_type_conversion_p (desttype
, srctype
)))
8842 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar
))
8843 || POINTER_TYPE_P (TREE_TYPE (srcvar
)))
8844 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar
))
8845 || POINTER_TYPE_P (TREE_TYPE (destvar
))))
8846 expr
= fold_convert (TREE_TYPE (destvar
), srcvar
);
8848 expr
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (destvar
), srcvar
);
8849 expr
= build2 (MODIFY_EXPR
, TREE_TYPE (destvar
), destvar
, expr
);
8855 if (endp
== 0 || endp
== 3)
8856 return omit_one_operand (type
, dest
, expr
);
8862 len
= fold_build2 (MINUS_EXPR
, TREE_TYPE (len
), len
,
8865 dest
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
8866 dest
= fold_convert (type
, dest
);
8868 dest
= omit_one_operand (type
, dest
, expr
);
8872 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8873 If LEN is not NULL, it represents the length of the string to be
8874 copied. Return NULL_TREE if no simplification can be made. */
8877 fold_builtin_strcpy (tree fndecl
, tree dest
, tree src
, tree len
)
8881 if (!validate_arg (dest
, POINTER_TYPE
)
8882 || !validate_arg (src
, POINTER_TYPE
))
8885 /* If SRC and DEST are the same (and not volatile), return DEST. */
8886 if (operand_equal_p (src
, dest
, 0))
8887 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
8892 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8898 len
= c_strlen (src
, 1);
8899 if (! len
|| TREE_SIDE_EFFECTS (len
))
8903 len
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
8904 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
8905 build_call_expr (fn
, 3, dest
, src
, len
));
8908 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8909 If SLEN is not NULL, it represents the length of the source string.
8910 Return NULL_TREE if no simplification can be made. */
8913 fold_builtin_strncpy (tree fndecl
, tree dest
, tree src
, tree len
, tree slen
)
8917 if (!validate_arg (dest
, POINTER_TYPE
)
8918 || !validate_arg (src
, POINTER_TYPE
)
8919 || !validate_arg (len
, INTEGER_TYPE
))
8922 /* If the LEN parameter is zero, return DEST. */
8923 if (integer_zerop (len
))
8924 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
8926 /* We can't compare slen with len as constants below if len is not a
8928 if (len
== 0 || TREE_CODE (len
) != INTEGER_CST
)
8932 slen
= c_strlen (src
, 1);
8934 /* Now, we must be passed a constant src ptr parameter. */
8935 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
8938 slen
= size_binop (PLUS_EXPR
, slen
, ssize_int (1));
8940 /* We do not support simplification of this case, though we do
8941 support it when expanding trees into RTL. */
8942 /* FIXME: generate a call to __builtin_memset. */
8943 if (tree_int_cst_lt (slen
, len
))
8946 /* OK transform into builtin memcpy. */
8947 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8950 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
8951 build_call_expr (fn
, 3, dest
, src
, len
));
8954 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8955 arguments to the call, and TYPE is its return type.
8956 Return NULL_TREE if no simplification can be made. */
8959 fold_builtin_memchr (tree arg1
, tree arg2
, tree len
, tree type
)
8961 if (!validate_arg (arg1
, POINTER_TYPE
)
8962 || !validate_arg (arg2
, INTEGER_TYPE
)
8963 || !validate_arg (len
, INTEGER_TYPE
))
8969 if (TREE_CODE (arg2
) != INTEGER_CST
8970 || !host_integerp (len
, 1))
8973 p1
= c_getstr (arg1
);
8974 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
8980 if (target_char_cast (arg2
, &c
))
8983 r
= (char *) memchr (p1
, c
, tree_low_cst (len
, 1));
8986 return build_int_cst (TREE_TYPE (arg1
), 0);
8988 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (arg1
), arg1
,
8990 return fold_convert (type
, tem
);
8996 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8997 Return NULL_TREE if no simplification can be made. */
9000 fold_builtin_memcmp (tree arg1
, tree arg2
, tree len
)
9002 const char *p1
, *p2
;
9004 if (!validate_arg (arg1
, POINTER_TYPE
)
9005 || !validate_arg (arg2
, POINTER_TYPE
)
9006 || !validate_arg (len
, INTEGER_TYPE
))
9009 /* If the LEN parameter is zero, return zero. */
9010 if (integer_zerop (len
))
9011 return omit_two_operands (integer_type_node
, integer_zero_node
,
9014 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9015 if (operand_equal_p (arg1
, arg2
, 0))
9016 return omit_one_operand (integer_type_node
, integer_zero_node
, len
);
9018 p1
= c_getstr (arg1
);
9019 p2
= c_getstr (arg2
);
9021 /* If all arguments are constant, and the value of len is not greater
9022 than the lengths of arg1 and arg2, evaluate at compile-time. */
9023 if (host_integerp (len
, 1) && p1
&& p2
9024 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
9025 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
9027 const int r
= memcmp (p1
, p2
, tree_low_cst (len
, 1));
9030 return integer_one_node
;
9032 return integer_minus_one_node
;
9034 return integer_zero_node
;
9037 /* If len parameter is one, return an expression corresponding to
9038 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9039 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
9041 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9042 tree cst_uchar_ptr_node
9043 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9045 tree ind1
= fold_convert (integer_type_node
,
9046 build1 (INDIRECT_REF
, cst_uchar_node
,
9047 fold_convert (cst_uchar_ptr_node
,
9049 tree ind2
= fold_convert (integer_type_node
,
9050 build1 (INDIRECT_REF
, cst_uchar_node
,
9051 fold_convert (cst_uchar_ptr_node
,
9053 return fold_build2 (MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9059 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9060 Return NULL_TREE if no simplification can be made. */
9063 fold_builtin_strcmp (tree arg1
, tree arg2
)
9065 const char *p1
, *p2
;
9067 if (!validate_arg (arg1
, POINTER_TYPE
)
9068 || !validate_arg (arg2
, POINTER_TYPE
))
9071 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9072 if (operand_equal_p (arg1
, arg2
, 0))
9073 return integer_zero_node
;
9075 p1
= c_getstr (arg1
);
9076 p2
= c_getstr (arg2
);
9080 const int i
= strcmp (p1
, p2
);
9082 return integer_minus_one_node
;
9084 return integer_one_node
;
9086 return integer_zero_node
;
9089 /* If the second arg is "", return *(const unsigned char*)arg1. */
9090 if (p2
&& *p2
== '\0')
9092 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9093 tree cst_uchar_ptr_node
9094 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9096 return fold_convert (integer_type_node
,
9097 build1 (INDIRECT_REF
, cst_uchar_node
,
9098 fold_convert (cst_uchar_ptr_node
,
9102 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9103 if (p1
&& *p1
== '\0')
9105 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9106 tree cst_uchar_ptr_node
9107 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9109 tree temp
= fold_convert (integer_type_node
,
9110 build1 (INDIRECT_REF
, cst_uchar_node
,
9111 fold_convert (cst_uchar_ptr_node
,
9113 return fold_build1 (NEGATE_EXPR
, integer_type_node
, temp
);
9119 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9120 Return NULL_TREE if no simplification can be made. */
9123 fold_builtin_strncmp (tree arg1
, tree arg2
, tree len
)
9125 const char *p1
, *p2
;
9127 if (!validate_arg (arg1
, POINTER_TYPE
)
9128 || !validate_arg (arg2
, POINTER_TYPE
)
9129 || !validate_arg (len
, INTEGER_TYPE
))
9132 /* If the LEN parameter is zero, return zero. */
9133 if (integer_zerop (len
))
9134 return omit_two_operands (integer_type_node
, integer_zero_node
,
9137 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9138 if (operand_equal_p (arg1
, arg2
, 0))
9139 return omit_one_operand (integer_type_node
, integer_zero_node
, len
);
9141 p1
= c_getstr (arg1
);
9142 p2
= c_getstr (arg2
);
9144 if (host_integerp (len
, 1) && p1
&& p2
)
9146 const int i
= strncmp (p1
, p2
, tree_low_cst (len
, 1));
9148 return integer_one_node
;
9150 return integer_minus_one_node
;
9152 return integer_zero_node
;
9155 /* If the second arg is "", and the length is greater than zero,
9156 return *(const unsigned char*)arg1. */
9157 if (p2
&& *p2
== '\0'
9158 && TREE_CODE (len
) == INTEGER_CST
9159 && tree_int_cst_sgn (len
) == 1)
9161 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9162 tree cst_uchar_ptr_node
9163 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9165 return fold_convert (integer_type_node
,
9166 build1 (INDIRECT_REF
, cst_uchar_node
,
9167 fold_convert (cst_uchar_ptr_node
,
9171 /* If the first arg is "", and the length is greater than zero,
9172 return -*(const unsigned char*)arg2. */
9173 if (p1
&& *p1
== '\0'
9174 && TREE_CODE (len
) == INTEGER_CST
9175 && tree_int_cst_sgn (len
) == 1)
9177 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9178 tree cst_uchar_ptr_node
9179 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9181 tree temp
= fold_convert (integer_type_node
,
9182 build1 (INDIRECT_REF
, cst_uchar_node
,
9183 fold_convert (cst_uchar_ptr_node
,
9185 return fold_build1 (NEGATE_EXPR
, integer_type_node
, temp
);
9188 /* If len parameter is one, return an expression corresponding to
9189 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9190 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
9192 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9193 tree cst_uchar_ptr_node
9194 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9196 tree ind1
= fold_convert (integer_type_node
,
9197 build1 (INDIRECT_REF
, cst_uchar_node
,
9198 fold_convert (cst_uchar_ptr_node
,
9200 tree ind2
= fold_convert (integer_type_node
,
9201 build1 (INDIRECT_REF
, cst_uchar_node
,
9202 fold_convert (cst_uchar_ptr_node
,
9204 return fold_build2 (MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9210 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9211 ARG. Return NULL_TREE if no simplification can be made. */
9214 fold_builtin_signbit (tree arg
, tree type
)
9218 if (!validate_arg (arg
, REAL_TYPE
))
9221 /* If ARG is a compile-time constant, determine the result. */
9222 if (TREE_CODE (arg
) == REAL_CST
9223 && !TREE_OVERFLOW (arg
))
9227 c
= TREE_REAL_CST (arg
);
9228 temp
= REAL_VALUE_NEGATIVE (c
) ? integer_one_node
: integer_zero_node
;
9229 return fold_convert (type
, temp
);
9232 /* If ARG is non-negative, the result is always zero. */
9233 if (tree_expr_nonnegative_p (arg
))
9234 return omit_one_operand (type
, integer_zero_node
, arg
);
9236 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9237 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg
))))
9238 return fold_build2 (LT_EXPR
, type
, arg
,
9239 build_real (TREE_TYPE (arg
), dconst0
));
9244 /* Fold function call to builtin copysign, copysignf or copysignl with
9245 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9249 fold_builtin_copysign (tree fndecl
, tree arg1
, tree arg2
, tree type
)
9253 if (!validate_arg (arg1
, REAL_TYPE
)
9254 || !validate_arg (arg2
, REAL_TYPE
))
9257 /* copysign(X,X) is X. */
9258 if (operand_equal_p (arg1
, arg2
, 0))
9259 return fold_convert (type
, arg1
);
9261 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9262 if (TREE_CODE (arg1
) == REAL_CST
9263 && TREE_CODE (arg2
) == REAL_CST
9264 && !TREE_OVERFLOW (arg1
)
9265 && !TREE_OVERFLOW (arg2
))
9267 REAL_VALUE_TYPE c1
, c2
;
9269 c1
= TREE_REAL_CST (arg1
);
9270 c2
= TREE_REAL_CST (arg2
);
9271 /* c1.sign := c2.sign. */
9272 real_copysign (&c1
, &c2
);
9273 return build_real (type
, c1
);
9276 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9277 Remember to evaluate Y for side-effects. */
9278 if (tree_expr_nonnegative_p (arg2
))
9279 return omit_one_operand (type
,
9280 fold_build1 (ABS_EXPR
, type
, arg1
),
9283 /* Strip sign changing operations for the first argument. */
9284 tem
= fold_strip_sign_ops (arg1
);
9286 return build_call_expr (fndecl
, 2, tem
, arg2
);
9291 /* Fold a call to builtin isascii with argument ARG. */
9294 fold_builtin_isascii (tree arg
)
9296 if (!validate_arg (arg
, INTEGER_TYPE
))
9300 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9301 arg
= build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9302 build_int_cst (NULL_TREE
,
9303 ~ (unsigned HOST_WIDE_INT
) 0x7f));
9304 return fold_build2 (EQ_EXPR
, integer_type_node
,
9305 arg
, integer_zero_node
);
9309 /* Fold a call to builtin toascii with argument ARG. */
9312 fold_builtin_toascii (tree arg
)
9314 if (!validate_arg (arg
, INTEGER_TYPE
))
9317 /* Transform toascii(c) -> (c & 0x7f). */
9318 return fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9319 build_int_cst (NULL_TREE
, 0x7f));
9322 /* Fold a call to builtin isdigit with argument ARG. */
9325 fold_builtin_isdigit (tree arg
)
9327 if (!validate_arg (arg
, INTEGER_TYPE
))
9331 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9332 /* According to the C standard, isdigit is unaffected by locale.
9333 However, it definitely is affected by the target character set. */
9334 unsigned HOST_WIDE_INT target_digit0
9335 = lang_hooks
.to_target_charset ('0');
9337 if (target_digit0
== 0)
9340 arg
= fold_convert (unsigned_type_node
, arg
);
9341 arg
= build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
9342 build_int_cst (unsigned_type_node
, target_digit0
));
9343 return fold_build2 (LE_EXPR
, integer_type_node
, arg
,
9344 build_int_cst (unsigned_type_node
, 9));
9348 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9351 fold_builtin_fabs (tree arg
, tree type
)
9353 if (!validate_arg (arg
, REAL_TYPE
))
9356 arg
= fold_convert (type
, arg
);
9357 if (TREE_CODE (arg
) == REAL_CST
)
9358 return fold_abs_const (arg
, type
);
9359 return fold_build1 (ABS_EXPR
, type
, arg
);
9362 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9365 fold_builtin_abs (tree arg
, tree type
)
9367 if (!validate_arg (arg
, INTEGER_TYPE
))
9370 arg
= fold_convert (type
, arg
);
9371 if (TREE_CODE (arg
) == INTEGER_CST
)
9372 return fold_abs_const (arg
, type
);
9373 return fold_build1 (ABS_EXPR
, type
, arg
);
9376 /* Fold a call to builtin fmin or fmax. */
9379 fold_builtin_fmin_fmax (tree arg0
, tree arg1
, tree type
, bool max
)
9381 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9383 /* Calculate the result when the argument is a constant. */
9384 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
9389 /* If either argument is NaN, return the other one. Avoid the
9390 transformation if we get (and honor) a signalling NaN. Using
9391 omit_one_operand() ensures we create a non-lvalue. */
9392 if (TREE_CODE (arg0
) == REAL_CST
9393 && real_isnan (&TREE_REAL_CST (arg0
))
9394 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9395 || ! TREE_REAL_CST (arg0
).signalling
))
9396 return omit_one_operand (type
, arg1
, arg0
);
9397 if (TREE_CODE (arg1
) == REAL_CST
9398 && real_isnan (&TREE_REAL_CST (arg1
))
9399 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
9400 || ! TREE_REAL_CST (arg1
).signalling
))
9401 return omit_one_operand (type
, arg0
, arg1
);
9403 /* Transform fmin/fmax(x,x) -> x. */
9404 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
9405 return omit_one_operand (type
, arg0
, arg1
);
9407 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9408 functions to return the numeric arg if the other one is NaN.
9409 These tree codes don't honor that, so only transform if
9410 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9411 handled, so we don't have to worry about it either. */
9412 if (flag_finite_math_only
)
9413 return fold_build2 ((max
? MAX_EXPR
: MIN_EXPR
), type
,
9414 fold_convert (type
, arg0
),
9415 fold_convert (type
, arg1
));
9420 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9423 fold_builtin_carg (tree arg
, tree type
)
9425 if (validate_arg (arg
, COMPLEX_TYPE
))
9427 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
9431 tree new_arg
= builtin_save_expr (arg
);
9432 tree r_arg
= fold_build1 (REALPART_EXPR
, type
, new_arg
);
9433 tree i_arg
= fold_build1 (IMAGPART_EXPR
, type
, new_arg
);
9434 return build_call_expr (atan2_fn
, 2, i_arg
, r_arg
);
9441 /* Fold a call to builtin logb/ilogb. */
9444 fold_builtin_logb (tree arg
, tree rettype
)
9446 if (! validate_arg (arg
, REAL_TYPE
))
9451 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9453 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9459 /* If arg is Inf or NaN and we're logb, return it. */
9460 if (TREE_CODE (rettype
) == REAL_TYPE
)
9461 return fold_convert (rettype
, arg
);
9462 /* Fall through... */
9464 /* Zero may set errno and/or raise an exception for logb, also
9465 for ilogb we don't know FP_ILOGB0. */
9468 /* For normal numbers, proceed iff radix == 2. In GCC,
9469 normalized significands are in the range [0.5, 1.0). We
9470 want the exponent as if they were [1.0, 2.0) so get the
9471 exponent and subtract 1. */
9472 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9473 return fold_convert (rettype
, build_int_cst (NULL_TREE
,
9474 REAL_EXP (value
)-1));
9482 /* Fold a call to builtin significand, if radix == 2. */
9485 fold_builtin_significand (tree arg
, tree rettype
)
9487 if (! validate_arg (arg
, REAL_TYPE
))
9492 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9494 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9501 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9502 return fold_convert (rettype
, arg
);
9504 /* For normal numbers, proceed iff radix == 2. */
9505 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9507 REAL_VALUE_TYPE result
= *value
;
9508 /* In GCC, normalized significands are in the range [0.5,
9509 1.0). We want them to be [1.0, 2.0) so set the
9511 SET_REAL_EXP (&result
, 1);
9512 return build_real (rettype
, result
);
9521 /* Fold a call to builtin frexp, we can assume the base is 2. */
9524 fold_builtin_frexp (tree arg0
, tree arg1
, tree rettype
)
9526 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9531 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9534 arg1
= build_fold_indirect_ref (arg1
);
9536 /* Proceed if a valid pointer type was passed in. */
9537 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9539 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9545 /* For +-0, return (*exp = 0, +-0). */
9546 exp
= integer_zero_node
;
9551 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9552 return omit_one_operand (rettype
, arg0
, arg1
);
9555 /* Since the frexp function always expects base 2, and in
9556 GCC normalized significands are already in the range
9557 [0.5, 1.0), we have exactly what frexp wants. */
9558 REAL_VALUE_TYPE frac_rvt
= *value
;
9559 SET_REAL_EXP (&frac_rvt
, 0);
9560 frac
= build_real (rettype
, frac_rvt
);
9561 exp
= build_int_cst (NULL_TREE
, REAL_EXP (value
));
9568 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9569 arg1
= fold_build2 (MODIFY_EXPR
, rettype
, arg1
, exp
);
9570 TREE_SIDE_EFFECTS (arg1
) = 1;
9571 return fold_build2 (COMPOUND_EXPR
, rettype
, arg1
, frac
);
9577 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9578 then we can assume the base is two. If it's false, then we have to
9579 check the mode of the TYPE parameter in certain cases. */
9582 fold_builtin_load_exponent (tree arg0
, tree arg1
, tree type
, bool ldexp
)
9584 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9589 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9590 if (real_zerop (arg0
) || integer_zerop (arg1
)
9591 || (TREE_CODE (arg0
) == REAL_CST
9592 && !real_isfinite (&TREE_REAL_CST (arg0
))))
9593 return omit_one_operand (type
, arg0
, arg1
);
9595 /* If both arguments are constant, then try to evaluate it. */
9596 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9597 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9598 && host_integerp (arg1
, 0))
9600 /* Bound the maximum adjustment to twice the range of the
9601 mode's valid exponents. Use abs to ensure the range is
9602 positive as a sanity check. */
9603 const long max_exp_adj
= 2 *
9604 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9605 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9607 /* Get the user-requested adjustment. */
9608 const HOST_WIDE_INT req_exp_adj
= tree_low_cst (arg1
, 0);
9610 /* The requested adjustment must be inside this range. This
9611 is a preliminary cap to avoid things like overflow, we
9612 may still fail to compute the result for other reasons. */
9613 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9615 REAL_VALUE_TYPE initial_result
;
9617 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9619 /* Ensure we didn't overflow. */
9620 if (! real_isinf (&initial_result
))
9622 const REAL_VALUE_TYPE trunc_result
9623 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9625 /* Only proceed if the target mode can hold the
9627 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9628 return build_real (type
, trunc_result
);
9637 /* Fold a call to builtin modf. */
9640 fold_builtin_modf (tree arg0
, tree arg1
, tree rettype
)
9642 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9647 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9650 arg1
= build_fold_indirect_ref (arg1
);
9652 /* Proceed if a valid pointer type was passed in. */
9653 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9655 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9656 REAL_VALUE_TYPE trunc
, frac
;
9662 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9663 trunc
= frac
= *value
;
9666 /* For +-Inf, return (*arg1 = arg0, +-0). */
9668 frac
.sign
= value
->sign
;
9672 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9673 real_trunc (&trunc
, VOIDmode
, value
);
9674 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9675 /* If the original number was negative and already
9676 integral, then the fractional part is -0.0. */
9677 if (value
->sign
&& frac
.cl
== rvc_zero
)
9678 frac
.sign
= value
->sign
;
9682 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9683 arg1
= fold_build2 (MODIFY_EXPR
, rettype
, arg1
,
9684 build_real (rettype
, trunc
));
9685 TREE_SIDE_EFFECTS (arg1
) = 1;
9686 return fold_build2 (COMPOUND_EXPR
, rettype
, arg1
,
9687 build_real (rettype
, frac
));
9693 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9694 ARG is the argument for the call. */
9697 fold_builtin_classify (tree fndecl
, tree arg
, int builtin_index
)
9699 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9702 if (!validate_arg (arg
, REAL_TYPE
))
9705 switch (builtin_index
)
9707 case BUILT_IN_ISINF
:
9708 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9709 return omit_one_operand (type
, integer_zero_node
, arg
);
9711 if (TREE_CODE (arg
) == REAL_CST
)
9713 r
= TREE_REAL_CST (arg
);
9714 if (real_isinf (&r
))
9715 return real_compare (GT_EXPR
, &r
, &dconst0
)
9716 ? integer_one_node
: integer_minus_one_node
;
9718 return integer_zero_node
;
9723 case BUILT_IN_ISINF_SIGN
:
9725 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9726 /* In a boolean context, GCC will fold the inner COND_EXPR to
9727 1. So e.g. "if (isinf_sign(x))" would be folded to just
9728 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9729 tree signbit_fn
= mathfn_built_in_1 (TREE_TYPE (arg
), BUILT_IN_SIGNBIT
, 0);
9730 tree isinf_fn
= built_in_decls
[BUILT_IN_ISINF
];
9731 tree tmp
= NULL_TREE
;
9733 arg
= builtin_save_expr (arg
);
9735 if (signbit_fn
&& isinf_fn
)
9737 tree signbit_call
= build_call_expr (signbit_fn
, 1, arg
);
9738 tree isinf_call
= build_call_expr (isinf_fn
, 1, arg
);
9740 signbit_call
= fold_build2 (NE_EXPR
, integer_type_node
,
9741 signbit_call
, integer_zero_node
);
9742 isinf_call
= fold_build2 (NE_EXPR
, integer_type_node
,
9743 isinf_call
, integer_zero_node
);
9745 tmp
= fold_build3 (COND_EXPR
, integer_type_node
, signbit_call
,
9746 integer_minus_one_node
, integer_one_node
);
9747 tmp
= fold_build3 (COND_EXPR
, integer_type_node
, isinf_call
, tmp
,
9754 case BUILT_IN_ISFINITE
:
9755 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
)))
9756 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9757 return omit_one_operand (type
, integer_one_node
, arg
);
9759 if (TREE_CODE (arg
) == REAL_CST
)
9761 r
= TREE_REAL_CST (arg
);
9762 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
9767 case BUILT_IN_ISNAN
:
9768 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
))))
9769 return omit_one_operand (type
, integer_zero_node
, arg
);
9771 if (TREE_CODE (arg
) == REAL_CST
)
9773 r
= TREE_REAL_CST (arg
);
9774 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
9777 arg
= builtin_save_expr (arg
);
9778 return fold_build2 (UNORDERED_EXPR
, type
, arg
, arg
);
9785 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9786 This builtin will generate code to return the appropriate floating
9787 point classification depending on the value of the floating point
9788 number passed in. The possible return values must be supplied as
9789 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9790 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9791 one floating point argument which is "type generic". */
9794 fold_builtin_fpclassify (tree exp
)
9796 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
9797 arg
, type
, res
, tmp
;
9798 enum machine_mode mode
;
9802 /* Verify the required arguments in the original call. */
9803 if (!validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
,
9804 INTEGER_TYPE
, INTEGER_TYPE
,
9805 INTEGER_TYPE
, REAL_TYPE
, VOID_TYPE
))
9808 fp_nan
= CALL_EXPR_ARG (exp
, 0);
9809 fp_infinite
= CALL_EXPR_ARG (exp
, 1);
9810 fp_normal
= CALL_EXPR_ARG (exp
, 2);
9811 fp_subnormal
= CALL_EXPR_ARG (exp
, 3);
9812 fp_zero
= CALL_EXPR_ARG (exp
, 4);
9813 arg
= CALL_EXPR_ARG (exp
, 5);
9814 type
= TREE_TYPE (arg
);
9815 mode
= TYPE_MODE (type
);
9816 arg
= builtin_save_expr (fold_build1 (ABS_EXPR
, type
, arg
));
9820 (fabs(x) == Inf ? FP_INFINITE :
9821 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9822 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9824 tmp
= fold_build2 (EQ_EXPR
, integer_type_node
, arg
,
9825 build_real (type
, dconst0
));
9826 res
= fold_build3 (COND_EXPR
, integer_type_node
, tmp
, fp_zero
, fp_subnormal
);
9828 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9829 real_from_string (&r
, buf
);
9830 tmp
= fold_build2 (GE_EXPR
, integer_type_node
, arg
, build_real (type
, r
));
9831 res
= fold_build3 (COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
9833 if (HONOR_INFINITIES (mode
))
9836 tmp
= fold_build2 (EQ_EXPR
, integer_type_node
, arg
,
9837 build_real (type
, r
));
9838 res
= fold_build3 (COND_EXPR
, integer_type_node
, tmp
, fp_infinite
, res
);
9841 if (HONOR_NANS (mode
))
9843 tmp
= fold_build2 (ORDERED_EXPR
, integer_type_node
, arg
, arg
);
9844 res
= fold_build3 (COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
9850 /* Fold a call to an unordered comparison function such as
9851 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9852 being called and ARG0 and ARG1 are the arguments for the call.
9853 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9854 the opposite of the desired result. UNORDERED_CODE is used
9855 for modes that can hold NaNs and ORDERED_CODE is used for
9859 fold_builtin_unordered_cmp (tree fndecl
, tree arg0
, tree arg1
,
9860 enum tree_code unordered_code
,
9861 enum tree_code ordered_code
)
9863 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9864 enum tree_code code
;
9866 enum tree_code code0
, code1
;
9867 tree cmp_type
= NULL_TREE
;
9869 type0
= TREE_TYPE (arg0
);
9870 type1
= TREE_TYPE (arg1
);
9872 code0
= TREE_CODE (type0
);
9873 code1
= TREE_CODE (type1
);
9875 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
9876 /* Choose the wider of two real types. */
9877 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
9879 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
9881 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
9884 arg0
= fold_convert (cmp_type
, arg0
);
9885 arg1
= fold_convert (cmp_type
, arg1
);
9887 if (unordered_code
== UNORDERED_EXPR
)
9889 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9890 return omit_two_operands (type
, integer_zero_node
, arg0
, arg1
);
9891 return fold_build2 (UNORDERED_EXPR
, type
, arg0
, arg1
);
9894 code
= HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))) ? unordered_code
9896 return fold_build1 (TRUTH_NOT_EXPR
, type
,
9897 fold_build2 (code
, type
, arg0
, arg1
));
9900 /* Fold a call to built-in function FNDECL with 0 arguments.
9901 IGNORE is true if the result of the function call is ignored. This
9902 function returns NULL_TREE if no simplification was possible. */
9905 fold_builtin_0 (tree fndecl
, bool ignore ATTRIBUTE_UNUSED
)
9907 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9908 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9911 CASE_FLT_FN (BUILT_IN_INF
):
9912 case BUILT_IN_INFD32
:
9913 case BUILT_IN_INFD64
:
9914 case BUILT_IN_INFD128
:
9915 return fold_builtin_inf (type
, true);
9917 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
9918 return fold_builtin_inf (type
, false);
9920 case BUILT_IN_CLASSIFY_TYPE
:
9921 return fold_builtin_classify_type (NULL_TREE
);
9929 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9930 IGNORE is true if the result of the function call is ignored. This
9931 function returns NULL_TREE if no simplification was possible. */
9934 fold_builtin_1 (tree fndecl
, tree arg0
, bool ignore
)
9936 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9937 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9941 case BUILT_IN_CONSTANT_P
:
9943 tree val
= fold_builtin_constant_p (arg0
);
9945 /* Gimplification will pull the CALL_EXPR for the builtin out of
9946 an if condition. When not optimizing, we'll not CSE it back.
9947 To avoid link error types of regressions, return false now. */
9948 if (!val
&& !optimize
)
9949 val
= integer_zero_node
;
9954 case BUILT_IN_CLASSIFY_TYPE
:
9955 return fold_builtin_classify_type (arg0
);
9957 case BUILT_IN_STRLEN
:
9958 return fold_builtin_strlen (arg0
);
9960 CASE_FLT_FN (BUILT_IN_FABS
):
9961 return fold_builtin_fabs (arg0
, type
);
9965 case BUILT_IN_LLABS
:
9966 case BUILT_IN_IMAXABS
:
9967 return fold_builtin_abs (arg0
, type
);
9969 CASE_FLT_FN (BUILT_IN_CONJ
):
9970 if (validate_arg (arg0
, COMPLEX_TYPE
))
9971 return fold_build1 (CONJ_EXPR
, type
, arg0
);
9974 CASE_FLT_FN (BUILT_IN_CREAL
):
9975 if (validate_arg (arg0
, COMPLEX_TYPE
))
9976 return non_lvalue (fold_build1 (REALPART_EXPR
, type
, arg0
));;
9979 CASE_FLT_FN (BUILT_IN_CIMAG
):
9980 if (validate_arg (arg0
, COMPLEX_TYPE
))
9981 return non_lvalue (fold_build1 (IMAGPART_EXPR
, type
, arg0
));
9984 CASE_FLT_FN (BUILT_IN_CCOS
):
9985 CASE_FLT_FN (BUILT_IN_CCOSH
):
9986 /* These functions are "even", i.e. f(x) == f(-x). */
9987 if (validate_arg (arg0
, COMPLEX_TYPE
))
9989 tree narg
= fold_strip_sign_ops (arg0
);
9991 return build_call_expr (fndecl
, 1, narg
);
9995 CASE_FLT_FN (BUILT_IN_CABS
):
9996 return fold_builtin_cabs (arg0
, type
, fndecl
);
9998 CASE_FLT_FN (BUILT_IN_CARG
):
9999 return fold_builtin_carg (arg0
, type
);
10001 CASE_FLT_FN (BUILT_IN_SQRT
):
10002 return fold_builtin_sqrt (arg0
, type
);
10004 CASE_FLT_FN (BUILT_IN_CBRT
):
10005 return fold_builtin_cbrt (arg0
, type
);
10007 CASE_FLT_FN (BUILT_IN_ASIN
):
10008 if (validate_arg (arg0
, REAL_TYPE
))
10009 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
10010 &dconstm1
, &dconst1
, true);
10013 CASE_FLT_FN (BUILT_IN_ACOS
):
10014 if (validate_arg (arg0
, REAL_TYPE
))
10015 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
10016 &dconstm1
, &dconst1
, true);
10019 CASE_FLT_FN (BUILT_IN_ATAN
):
10020 if (validate_arg (arg0
, REAL_TYPE
))
10021 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
10024 CASE_FLT_FN (BUILT_IN_ASINH
):
10025 if (validate_arg (arg0
, REAL_TYPE
))
10026 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
10029 CASE_FLT_FN (BUILT_IN_ACOSH
):
10030 if (validate_arg (arg0
, REAL_TYPE
))
10031 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
10032 &dconst1
, NULL
, true);
10035 CASE_FLT_FN (BUILT_IN_ATANH
):
10036 if (validate_arg (arg0
, REAL_TYPE
))
10037 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
10038 &dconstm1
, &dconst1
, false);
10041 CASE_FLT_FN (BUILT_IN_SIN
):
10042 if (validate_arg (arg0
, REAL_TYPE
))
10043 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
10046 CASE_FLT_FN (BUILT_IN_COS
):
10047 return fold_builtin_cos (arg0
, type
, fndecl
);
10050 CASE_FLT_FN (BUILT_IN_TAN
):
10051 return fold_builtin_tan (arg0
, type
);
10053 CASE_FLT_FN (BUILT_IN_CEXP
):
10054 return fold_builtin_cexp (arg0
, type
);
10056 CASE_FLT_FN (BUILT_IN_CEXPI
):
10057 if (validate_arg (arg0
, REAL_TYPE
))
10058 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
10061 CASE_FLT_FN (BUILT_IN_SINH
):
10062 if (validate_arg (arg0
, REAL_TYPE
))
10063 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
10066 CASE_FLT_FN (BUILT_IN_COSH
):
10067 return fold_builtin_cosh (arg0
, type
, fndecl
);
10069 CASE_FLT_FN (BUILT_IN_TANH
):
10070 if (validate_arg (arg0
, REAL_TYPE
))
10071 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
10074 CASE_FLT_FN (BUILT_IN_ERF
):
10075 if (validate_arg (arg0
, REAL_TYPE
))
10076 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
10079 CASE_FLT_FN (BUILT_IN_ERFC
):
10080 if (validate_arg (arg0
, REAL_TYPE
))
10081 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
10084 CASE_FLT_FN (BUILT_IN_TGAMMA
):
10085 if (validate_arg (arg0
, REAL_TYPE
))
10086 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
10089 CASE_FLT_FN (BUILT_IN_EXP
):
10090 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp
);
10092 CASE_FLT_FN (BUILT_IN_EXP2
):
10093 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp2
);
10095 CASE_FLT_FN (BUILT_IN_EXP10
):
10096 CASE_FLT_FN (BUILT_IN_POW10
):
10097 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp10
);
10099 CASE_FLT_FN (BUILT_IN_EXPM1
):
10100 if (validate_arg (arg0
, REAL_TYPE
))
10101 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
10104 CASE_FLT_FN (BUILT_IN_LOG
):
10105 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log
);
10107 CASE_FLT_FN (BUILT_IN_LOG2
):
10108 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log2
);
10110 CASE_FLT_FN (BUILT_IN_LOG10
):
10111 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log10
);
10113 CASE_FLT_FN (BUILT_IN_LOG1P
):
10114 if (validate_arg (arg0
, REAL_TYPE
))
10115 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
10116 &dconstm1
, NULL
, false);
10119 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10120 CASE_FLT_FN (BUILT_IN_J0
):
10121 if (validate_arg (arg0
, REAL_TYPE
))
10122 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
10126 CASE_FLT_FN (BUILT_IN_J1
):
10127 if (validate_arg (arg0
, REAL_TYPE
))
10128 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
10132 CASE_FLT_FN (BUILT_IN_Y0
):
10133 if (validate_arg (arg0
, REAL_TYPE
))
10134 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
10135 &dconst0
, NULL
, false);
10138 CASE_FLT_FN (BUILT_IN_Y1
):
10139 if (validate_arg (arg0
, REAL_TYPE
))
10140 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
10141 &dconst0
, NULL
, false);
10145 CASE_FLT_FN (BUILT_IN_NAN
):
10146 case BUILT_IN_NAND32
:
10147 case BUILT_IN_NAND64
:
10148 case BUILT_IN_NAND128
:
10149 return fold_builtin_nan (arg0
, type
, true);
10151 CASE_FLT_FN (BUILT_IN_NANS
):
10152 return fold_builtin_nan (arg0
, type
, false);
10154 CASE_FLT_FN (BUILT_IN_FLOOR
):
10155 return fold_builtin_floor (fndecl
, arg0
);
10157 CASE_FLT_FN (BUILT_IN_CEIL
):
10158 return fold_builtin_ceil (fndecl
, arg0
);
10160 CASE_FLT_FN (BUILT_IN_TRUNC
):
10161 return fold_builtin_trunc (fndecl
, arg0
);
10163 CASE_FLT_FN (BUILT_IN_ROUND
):
10164 return fold_builtin_round (fndecl
, arg0
);
10166 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
10167 CASE_FLT_FN (BUILT_IN_RINT
):
10168 return fold_trunc_transparent_mathfn (fndecl
, arg0
);
10170 CASE_FLT_FN (BUILT_IN_LCEIL
):
10171 CASE_FLT_FN (BUILT_IN_LLCEIL
):
10172 CASE_FLT_FN (BUILT_IN_LFLOOR
):
10173 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
10174 CASE_FLT_FN (BUILT_IN_LROUND
):
10175 CASE_FLT_FN (BUILT_IN_LLROUND
):
10176 return fold_builtin_int_roundingfn (fndecl
, arg0
);
10178 CASE_FLT_FN (BUILT_IN_LRINT
):
10179 CASE_FLT_FN (BUILT_IN_LLRINT
):
10180 return fold_fixed_mathfn (fndecl
, arg0
);
10182 case BUILT_IN_BSWAP32
:
10183 case BUILT_IN_BSWAP64
:
10184 return fold_builtin_bswap (fndecl
, arg0
);
10186 CASE_INT_FN (BUILT_IN_FFS
):
10187 CASE_INT_FN (BUILT_IN_CLZ
):
10188 CASE_INT_FN (BUILT_IN_CTZ
):
10189 CASE_INT_FN (BUILT_IN_POPCOUNT
):
10190 CASE_INT_FN (BUILT_IN_PARITY
):
10191 return fold_builtin_bitop (fndecl
, arg0
);
10193 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
10194 return fold_builtin_signbit (arg0
, type
);
10196 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
10197 return fold_builtin_significand (arg0
, type
);
10199 CASE_FLT_FN (BUILT_IN_ILOGB
):
10200 CASE_FLT_FN (BUILT_IN_LOGB
):
10201 return fold_builtin_logb (arg0
, type
);
10203 case BUILT_IN_ISASCII
:
10204 return fold_builtin_isascii (arg0
);
10206 case BUILT_IN_TOASCII
:
10207 return fold_builtin_toascii (arg0
);
10209 case BUILT_IN_ISDIGIT
:
10210 return fold_builtin_isdigit (arg0
);
10212 CASE_FLT_FN (BUILT_IN_FINITE
):
10213 case BUILT_IN_FINITED32
:
10214 case BUILT_IN_FINITED64
:
10215 case BUILT_IN_FINITED128
:
10216 case BUILT_IN_ISFINITE
:
10217 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISFINITE
);
10219 CASE_FLT_FN (BUILT_IN_ISINF
):
10220 case BUILT_IN_ISINFD32
:
10221 case BUILT_IN_ISINFD64
:
10222 case BUILT_IN_ISINFD128
:
10223 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISINF
);
10225 case BUILT_IN_ISINF_SIGN
:
10226 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
10228 CASE_FLT_FN (BUILT_IN_ISNAN
):
10229 case BUILT_IN_ISNAND32
:
10230 case BUILT_IN_ISNAND64
:
10231 case BUILT_IN_ISNAND128
:
10232 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISNAN
);
10234 case BUILT_IN_PRINTF
:
10235 case BUILT_IN_PRINTF_UNLOCKED
:
10236 case BUILT_IN_VPRINTF
:
10237 return fold_builtin_printf (fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
10247 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10248 IGNORE is true if the result of the function call is ignored. This
10249 function returns NULL_TREE if no simplification was possible. */
10252 fold_builtin_2 (tree fndecl
, tree arg0
, tree arg1
, bool ignore
)
10254 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10255 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10259 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10260 CASE_FLT_FN (BUILT_IN_JN
):
10261 if (validate_arg (arg0
, INTEGER_TYPE
)
10262 && validate_arg (arg1
, REAL_TYPE
))
10263 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
10266 CASE_FLT_FN (BUILT_IN_YN
):
10267 if (validate_arg (arg0
, INTEGER_TYPE
)
10268 && validate_arg (arg1
, REAL_TYPE
))
10269 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
10273 CASE_FLT_FN (BUILT_IN_DREM
):
10274 CASE_FLT_FN (BUILT_IN_REMAINDER
):
10275 if (validate_arg (arg0
, REAL_TYPE
)
10276 && validate_arg(arg1
, REAL_TYPE
))
10277 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
10280 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
10281 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
10282 if (validate_arg (arg0
, REAL_TYPE
)
10283 && validate_arg(arg1
, POINTER_TYPE
))
10284 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
10288 CASE_FLT_FN (BUILT_IN_ATAN2
):
10289 if (validate_arg (arg0
, REAL_TYPE
)
10290 && validate_arg(arg1
, REAL_TYPE
))
10291 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
10294 CASE_FLT_FN (BUILT_IN_FDIM
):
10295 if (validate_arg (arg0
, REAL_TYPE
)
10296 && validate_arg(arg1
, REAL_TYPE
))
10297 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
10300 CASE_FLT_FN (BUILT_IN_HYPOT
):
10301 return fold_builtin_hypot (fndecl
, arg0
, arg1
, type
);
10303 CASE_FLT_FN (BUILT_IN_LDEXP
):
10304 return fold_builtin_load_exponent (arg0
, arg1
, type
, /*ldexp=*/true);
10305 CASE_FLT_FN (BUILT_IN_SCALBN
):
10306 CASE_FLT_FN (BUILT_IN_SCALBLN
):
10307 return fold_builtin_load_exponent (arg0
, arg1
, type
, /*ldexp=*/false);
10309 CASE_FLT_FN (BUILT_IN_FREXP
):
10310 return fold_builtin_frexp (arg0
, arg1
, type
);
10312 CASE_FLT_FN (BUILT_IN_MODF
):
10313 return fold_builtin_modf (arg0
, arg1
, type
);
10315 case BUILT_IN_BZERO
:
10316 return fold_builtin_bzero (arg0
, arg1
, ignore
);
10318 case BUILT_IN_FPUTS
:
10319 return fold_builtin_fputs (arg0
, arg1
, ignore
, false, NULL_TREE
);
10321 case BUILT_IN_FPUTS_UNLOCKED
:
10322 return fold_builtin_fputs (arg0
, arg1
, ignore
, true, NULL_TREE
);
10324 case BUILT_IN_STRSTR
:
10325 return fold_builtin_strstr (arg0
, arg1
, type
);
10327 case BUILT_IN_STRCAT
:
10328 return fold_builtin_strcat (arg0
, arg1
);
10330 case BUILT_IN_STRSPN
:
10331 return fold_builtin_strspn (arg0
, arg1
);
10333 case BUILT_IN_STRCSPN
:
10334 return fold_builtin_strcspn (arg0
, arg1
);
10336 case BUILT_IN_STRCHR
:
10337 case BUILT_IN_INDEX
:
10338 return fold_builtin_strchr (arg0
, arg1
, type
);
10340 case BUILT_IN_STRRCHR
:
10341 case BUILT_IN_RINDEX
:
10342 return fold_builtin_strrchr (arg0
, arg1
, type
);
10344 case BUILT_IN_STRCPY
:
10345 return fold_builtin_strcpy (fndecl
, arg0
, arg1
, NULL_TREE
);
10347 case BUILT_IN_STRCMP
:
10348 return fold_builtin_strcmp (arg0
, arg1
);
10350 case BUILT_IN_STRPBRK
:
10351 return fold_builtin_strpbrk (arg0
, arg1
, type
);
10353 case BUILT_IN_EXPECT
:
10354 return fold_builtin_expect (arg0
, arg1
);
10356 CASE_FLT_FN (BUILT_IN_POW
):
10357 return fold_builtin_pow (fndecl
, arg0
, arg1
, type
);
10359 CASE_FLT_FN (BUILT_IN_POWI
):
10360 return fold_builtin_powi (fndecl
, arg0
, arg1
, type
);
10362 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
10363 return fold_builtin_copysign (fndecl
, arg0
, arg1
, type
);
10365 CASE_FLT_FN (BUILT_IN_FMIN
):
10366 return fold_builtin_fmin_fmax (arg0
, arg1
, type
, /*max=*/false);
10368 CASE_FLT_FN (BUILT_IN_FMAX
):
10369 return fold_builtin_fmin_fmax (arg0
, arg1
, type
, /*max=*/true);
10371 case BUILT_IN_ISGREATER
:
10372 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
10373 case BUILT_IN_ISGREATEREQUAL
:
10374 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
10375 case BUILT_IN_ISLESS
:
10376 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
10377 case BUILT_IN_ISLESSEQUAL
:
10378 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
10379 case BUILT_IN_ISLESSGREATER
:
10380 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
10381 case BUILT_IN_ISUNORDERED
:
10382 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNORDERED_EXPR
,
10385 /* We do the folding for va_start in the expander. */
10386 case BUILT_IN_VA_START
:
10389 case BUILT_IN_SPRINTF
:
10390 return fold_builtin_sprintf (arg0
, arg1
, NULL_TREE
, ignore
);
10392 case BUILT_IN_OBJECT_SIZE
:
10393 return fold_builtin_object_size (arg0
, arg1
);
10395 case BUILT_IN_PRINTF
:
10396 case BUILT_IN_PRINTF_UNLOCKED
:
10397 case BUILT_IN_VPRINTF
:
10398 return fold_builtin_printf (fndecl
, arg0
, arg1
, ignore
, fcode
);
10400 case BUILT_IN_PRINTF_CHK
:
10401 case BUILT_IN_VPRINTF_CHK
:
10402 if (!validate_arg (arg0
, INTEGER_TYPE
)
10403 || TREE_SIDE_EFFECTS (arg0
))
10406 return fold_builtin_printf (fndecl
, arg1
, NULL_TREE
, ignore
, fcode
);
10409 case BUILT_IN_FPRINTF
:
10410 case BUILT_IN_FPRINTF_UNLOCKED
:
10411 case BUILT_IN_VFPRINTF
:
10412 return fold_builtin_fprintf (fndecl
, arg0
, arg1
, NULL_TREE
,
10421 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10422 and ARG2. IGNORE is true if the result of the function call is ignored.
10423 This function returns NULL_TREE if no simplification was possible. */
10426 fold_builtin_3 (tree fndecl
, tree arg0
, tree arg1
, tree arg2
, bool ignore
)
10428 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10429 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10433 CASE_FLT_FN (BUILT_IN_SINCOS
):
10434 return fold_builtin_sincos (arg0
, arg1
, arg2
);
10436 CASE_FLT_FN (BUILT_IN_FMA
):
10437 if (validate_arg (arg0
, REAL_TYPE
)
10438 && validate_arg(arg1
, REAL_TYPE
)
10439 && validate_arg(arg2
, REAL_TYPE
))
10440 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
10443 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10444 CASE_FLT_FN (BUILT_IN_REMQUO
):
10445 if (validate_arg (arg0
, REAL_TYPE
)
10446 && validate_arg(arg1
, REAL_TYPE
)
10447 && validate_arg(arg2
, POINTER_TYPE
))
10448 return do_mpfr_remquo (arg0
, arg1
, arg2
);
10452 case BUILT_IN_MEMSET
:
10453 return fold_builtin_memset (arg0
, arg1
, arg2
, type
, ignore
);
10455 case BUILT_IN_BCOPY
:
10456 return fold_builtin_memory_op (arg1
, arg0
, arg2
, void_type_node
, true, /*endp=*/3);
10458 case BUILT_IN_MEMCPY
:
10459 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/0);
10461 case BUILT_IN_MEMPCPY
:
10462 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/1);
10464 case BUILT_IN_MEMMOVE
:
10465 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/3);
10467 case BUILT_IN_STRNCAT
:
10468 return fold_builtin_strncat (arg0
, arg1
, arg2
);
10470 case BUILT_IN_STRNCPY
:
10471 return fold_builtin_strncpy (fndecl
, arg0
, arg1
, arg2
, NULL_TREE
);
10473 case BUILT_IN_STRNCMP
:
10474 return fold_builtin_strncmp (arg0
, arg1
, arg2
);
10476 case BUILT_IN_MEMCHR
:
10477 return fold_builtin_memchr (arg0
, arg1
, arg2
, type
);
10479 case BUILT_IN_BCMP
:
10480 case BUILT_IN_MEMCMP
:
10481 return fold_builtin_memcmp (arg0
, arg1
, arg2
);;
10483 case BUILT_IN_SPRINTF
:
10484 return fold_builtin_sprintf (arg0
, arg1
, arg2
, ignore
);
10486 case BUILT_IN_STRCPY_CHK
:
10487 case BUILT_IN_STPCPY_CHK
:
10488 return fold_builtin_stxcpy_chk (fndecl
, arg0
, arg1
, arg2
, NULL_TREE
,
10491 case BUILT_IN_STRCAT_CHK
:
10492 return fold_builtin_strcat_chk (fndecl
, arg0
, arg1
, arg2
);
10494 case BUILT_IN_PRINTF_CHK
:
10495 case BUILT_IN_VPRINTF_CHK
:
10496 if (!validate_arg (arg0
, INTEGER_TYPE
)
10497 || TREE_SIDE_EFFECTS (arg0
))
10500 return fold_builtin_printf (fndecl
, arg1
, arg2
, ignore
, fcode
);
10503 case BUILT_IN_FPRINTF
:
10504 case BUILT_IN_FPRINTF_UNLOCKED
:
10505 case BUILT_IN_VFPRINTF
:
10506 return fold_builtin_fprintf (fndecl
, arg0
, arg1
, arg2
, ignore
, fcode
);
10508 case BUILT_IN_FPRINTF_CHK
:
10509 case BUILT_IN_VFPRINTF_CHK
:
10510 if (!validate_arg (arg1
, INTEGER_TYPE
)
10511 || TREE_SIDE_EFFECTS (arg1
))
10514 return fold_builtin_fprintf (fndecl
, arg0
, arg2
, NULL_TREE
,
10523 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10524 ARG2, and ARG3. IGNORE is true if the result of the function call is
10525 ignored. This function returns NULL_TREE if no simplification was
10529 fold_builtin_4 (tree fndecl
, tree arg0
, tree arg1
, tree arg2
, tree arg3
,
10532 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10536 case BUILT_IN_MEMCPY_CHK
:
10537 case BUILT_IN_MEMPCPY_CHK
:
10538 case BUILT_IN_MEMMOVE_CHK
:
10539 case BUILT_IN_MEMSET_CHK
:
10540 return fold_builtin_memory_chk (fndecl
, arg0
, arg1
, arg2
, arg3
,
10542 DECL_FUNCTION_CODE (fndecl
));
10544 case BUILT_IN_STRNCPY_CHK
:
10545 return fold_builtin_strncpy_chk (arg0
, arg1
, arg2
, arg3
, NULL_TREE
);
10547 case BUILT_IN_STRNCAT_CHK
:
10548 return fold_builtin_strncat_chk (fndecl
, arg0
, arg1
, arg2
, arg3
);
10550 case BUILT_IN_FPRINTF_CHK
:
10551 case BUILT_IN_VFPRINTF_CHK
:
10552 if (!validate_arg (arg1
, INTEGER_TYPE
)
10553 || TREE_SIDE_EFFECTS (arg1
))
10556 return fold_builtin_fprintf (fndecl
, arg0
, arg2
, arg3
,
10566 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10567 arguments, where NARGS <= 4. IGNORE is true if the result of the
10568 function call is ignored. This function returns NULL_TREE if no
10569 simplification was possible. Note that this only folds builtins with
10570 fixed argument patterns. Foldings that do varargs-to-varargs
10571 transformations, or that match calls with more than 4 arguments,
10572 need to be handled with fold_builtin_varargs instead. */
10574 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10577 fold_builtin_n (tree fndecl
, tree
*args
, int nargs
, bool ignore
)
10579 tree ret
= NULL_TREE
;
10584 ret
= fold_builtin_0 (fndecl
, ignore
);
10587 ret
= fold_builtin_1 (fndecl
, args
[0], ignore
);
10590 ret
= fold_builtin_2 (fndecl
, args
[0], args
[1], ignore
);
10593 ret
= fold_builtin_3 (fndecl
, args
[0], args
[1], args
[2], ignore
);
10596 ret
= fold_builtin_4 (fndecl
, args
[0], args
[1], args
[2], args
[3],
10604 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10605 TREE_NO_WARNING (ret
) = 1;
10611 /* Builtins with folding operations that operate on "..." arguments
10612 need special handling; we need to store the arguments in a convenient
10613 data structure before attempting any folding. Fortunately there are
10614 only a few builtins that fall into this category. FNDECL is the
10615 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10616 result of the function call is ignored. */
10619 fold_builtin_varargs (tree fndecl
, tree exp
, bool ignore ATTRIBUTE_UNUSED
)
10621 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10622 tree ret
= NULL_TREE
;
10626 case BUILT_IN_SPRINTF_CHK
:
10627 case BUILT_IN_VSPRINTF_CHK
:
10628 ret
= fold_builtin_sprintf_chk (exp
, fcode
);
10631 case BUILT_IN_SNPRINTF_CHK
:
10632 case BUILT_IN_VSNPRINTF_CHK
:
10633 ret
= fold_builtin_snprintf_chk (exp
, NULL_TREE
, fcode
);
10636 case BUILT_IN_FPCLASSIFY
:
10637 ret
= fold_builtin_fpclassify (exp
);
10645 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10646 TREE_NO_WARNING (ret
) = 1;
10652 /* A wrapper function for builtin folding that prevents warnings for
10653 "statement without effect" and the like, caused by removing the
10654 call node earlier than the warning is generated. */
10657 fold_call_expr (tree exp
, bool ignore
)
10659 tree ret
= NULL_TREE
;
10660 tree fndecl
= get_callee_fndecl (exp
);
10662 && TREE_CODE (fndecl
) == FUNCTION_DECL
10663 && DECL_BUILT_IN (fndecl
)
10664 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10665 yet. Defer folding until we see all the arguments
10666 (after inlining). */
10667 && !CALL_EXPR_VA_ARG_PACK (exp
))
10669 int nargs
= call_expr_nargs (exp
);
10671 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10672 instead last argument is __builtin_va_arg_pack (). Defer folding
10673 even in that case, until arguments are finalized. */
10674 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
10676 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
10678 && TREE_CODE (fndecl2
) == FUNCTION_DECL
10679 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
10680 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
10684 /* FIXME: Don't use a list in this interface. */
10685 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10686 return targetm
.fold_builtin (fndecl
, CALL_EXPR_ARGS (exp
), ignore
);
10689 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
10691 tree
*args
= CALL_EXPR_ARGP (exp
);
10692 ret
= fold_builtin_n (fndecl
, args
, nargs
, ignore
);
10695 ret
= fold_builtin_varargs (fndecl
, exp
, ignore
);
10698 /* Propagate location information from original call to
10699 expansion of builtin. Otherwise things like
10700 maybe_emit_chk_warning, that operate on the expansion
10701 of a builtin, will use the wrong location information. */
10702 if (CAN_HAVE_LOCATION_P (exp
) && EXPR_HAS_LOCATION (exp
))
10704 tree realret
= ret
;
10705 if (TREE_CODE (ret
) == NOP_EXPR
)
10706 realret
= TREE_OPERAND (ret
, 0);
10707 if (CAN_HAVE_LOCATION_P (realret
)
10708 && !EXPR_HAS_LOCATION (realret
))
10709 SET_EXPR_LOCATION (realret
, EXPR_LOCATION (exp
));
10719 /* Conveniently construct a function call expression. FNDECL names the
10720 function to be called and ARGLIST is a TREE_LIST of arguments. */
10723 build_function_call_expr (tree fndecl
, tree arglist
)
10725 tree fntype
= TREE_TYPE (fndecl
);
10726 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10727 int n
= list_length (arglist
);
10728 tree
*argarray
= (tree
*) alloca (n
* sizeof (tree
));
10731 for (i
= 0; i
< n
; i
++, arglist
= TREE_CHAIN (arglist
))
10732 argarray
[i
] = TREE_VALUE (arglist
);
10733 return fold_builtin_call_array (TREE_TYPE (fntype
), fn
, n
, argarray
);
10736 /* Conveniently construct a function call expression. FNDECL names the
10737 function to be called, N is the number of arguments, and the "..."
10738 parameters are the argument expressions. */
10741 build_call_expr (tree fndecl
, int n
, ...)
10744 tree fntype
= TREE_TYPE (fndecl
);
10745 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10746 tree
*argarray
= (tree
*) alloca (n
* sizeof (tree
));
10750 for (i
= 0; i
< n
; i
++)
10751 argarray
[i
] = va_arg (ap
, tree
);
10753 return fold_builtin_call_array (TREE_TYPE (fntype
), fn
, n
, argarray
);
10756 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10757 N arguments are passed in the array ARGARRAY. */
10760 fold_builtin_call_array (tree type
,
10765 tree ret
= NULL_TREE
;
10769 if (TREE_CODE (fn
) == ADDR_EXPR
)
10771 tree fndecl
= TREE_OPERAND (fn
, 0);
10772 if (TREE_CODE (fndecl
) == FUNCTION_DECL
10773 && DECL_BUILT_IN (fndecl
))
10775 /* If last argument is __builtin_va_arg_pack (), arguments to this
10776 function are not finalized yet. Defer folding until they are. */
10777 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
10779 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
10781 && TREE_CODE (fndecl2
) == FUNCTION_DECL
10782 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
10783 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
10784 return build_call_array (type
, fn
, n
, argarray
);
10786 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10788 tree arglist
= NULL_TREE
;
10789 for (i
= n
- 1; i
>= 0; i
--)
10790 arglist
= tree_cons (NULL_TREE
, argarray
[i
], arglist
);
10791 ret
= targetm
.fold_builtin (fndecl
, arglist
, false);
10795 else if (n
<= MAX_ARGS_TO_FOLD_BUILTIN
)
10797 /* First try the transformations that don't require consing up
10799 ret
= fold_builtin_n (fndecl
, argarray
, n
, false);
10804 /* If we got this far, we need to build an exp. */
10805 exp
= build_call_array (type
, fn
, n
, argarray
);
10806 ret
= fold_builtin_varargs (fndecl
, exp
, false);
10807 return ret
? ret
: exp
;
10811 return build_call_array (type
, fn
, n
, argarray
);
10814 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10815 along with N new arguments specified as the "..." parameters. SKIP
10816 is the number of arguments in EXP to be omitted. This function is used
10817 to do varargs-to-varargs transformations. */
10820 rewrite_call_expr (tree exp
, int skip
, tree fndecl
, int n
, ...)
10822 int oldnargs
= call_expr_nargs (exp
);
10823 int nargs
= oldnargs
- skip
+ n
;
10824 tree fntype
= TREE_TYPE (fndecl
);
10825 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10833 buffer
= XALLOCAVEC (tree
, nargs
);
10835 for (i
= 0; i
< n
; i
++)
10836 buffer
[i
] = va_arg (ap
, tree
);
10838 for (j
= skip
; j
< oldnargs
; j
++, i
++)
10839 buffer
[i
] = CALL_EXPR_ARG (exp
, j
);
10842 buffer
= CALL_EXPR_ARGP (exp
) + skip
;
10844 return fold (build_call_array (TREE_TYPE (exp
), fn
, nargs
, buffer
));
10847 /* Validate a single argument ARG against a tree code CODE representing
10851 validate_arg (const_tree arg
, enum tree_code code
)
10855 else if (code
== POINTER_TYPE
)
10856 return POINTER_TYPE_P (TREE_TYPE (arg
));
10857 else if (code
== INTEGER_TYPE
)
10858 return INTEGRAL_TYPE_P (TREE_TYPE (arg
));
10859 return code
== TREE_CODE (TREE_TYPE (arg
));
10862 /* This function validates the types of a function call argument list
10863 against a specified list of tree_codes. If the last specifier is a 0,
10864 that represents an ellipses, otherwise the last specifier must be a
10867 This is the GIMPLE version of validate_arglist. Eventually we want to
10868 completely convert builtins.c to work from GIMPLEs and the tree based
10869 validate_arglist will then be removed. */
10872 validate_gimple_arglist (const_gimple call
, ...)
10874 enum tree_code code
;
10880 va_start (ap
, call
);
10885 code
= va_arg (ap
, enum tree_code
);
10889 /* This signifies an ellipses, any further arguments are all ok. */
10893 /* This signifies an endlink, if no arguments remain, return
10894 true, otherwise return false. */
10895 res
= (i
== gimple_call_num_args (call
));
10898 /* If no parameters remain or the parameter's code does not
10899 match the specified code, return false. Otherwise continue
10900 checking any remaining arguments. */
10901 arg
= gimple_call_arg (call
, i
++);
10902 if (!validate_arg (arg
, code
))
10909 /* We need gotos here since we can only have one VA_CLOSE in a
10917 /* This function validates the types of a function call argument list
10918 against a specified list of tree_codes. If the last specifier is a 0,
10919 that represents an ellipses, otherwise the last specifier must be a
10923 validate_arglist (const_tree callexpr
, ...)
10925 enum tree_code code
;
10928 const_call_expr_arg_iterator iter
;
10931 va_start (ap
, callexpr
);
10932 init_const_call_expr_arg_iterator (callexpr
, &iter
);
10936 code
= va_arg (ap
, enum tree_code
);
10940 /* This signifies an ellipses, any further arguments are all ok. */
10944 /* This signifies an endlink, if no arguments remain, return
10945 true, otherwise return false. */
10946 res
= !more_const_call_expr_args_p (&iter
);
10949 /* If no parameters remain or the parameter's code does not
10950 match the specified code, return false. Otherwise continue
10951 checking any remaining arguments. */
10952 arg
= next_const_call_expr_arg (&iter
);
10953 if (!validate_arg (arg
, code
))
10960 /* We need gotos here since we can only have one VA_CLOSE in a
10968 /* Default target-specific builtin expander that does nothing. */
10971 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
10972 rtx target ATTRIBUTE_UNUSED
,
10973 rtx subtarget ATTRIBUTE_UNUSED
,
10974 enum machine_mode mode ATTRIBUTE_UNUSED
,
10975 int ignore ATTRIBUTE_UNUSED
)
10980 /* Returns true is EXP represents data that would potentially reside
10981 in a readonly section. */
10984 readonly_data_expr (tree exp
)
10988 if (TREE_CODE (exp
) != ADDR_EXPR
)
10991 exp
= get_base_address (TREE_OPERAND (exp
, 0));
10995 /* Make sure we call decl_readonly_section only for trees it
10996 can handle (since it returns true for everything it doesn't
10998 if (TREE_CODE (exp
) == STRING_CST
10999 || TREE_CODE (exp
) == CONSTRUCTOR
11000 || (TREE_CODE (exp
) == VAR_DECL
&& TREE_STATIC (exp
)))
11001 return decl_readonly_section (exp
, 0);
11006 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11007 to the call, and TYPE is its return type.
11009 Return NULL_TREE if no simplification was possible, otherwise return the
11010 simplified form of the call as a tree.
11012 The simplified form may be a constant or other expression which
11013 computes the same value, but in a more efficient manner (including
11014 calls to other builtin functions).
11016 The call may contain arguments which need to be evaluated, but
11017 which are not useful to determine the result of the call. In
11018 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11019 COMPOUND_EXPR will be an argument which must be evaluated.
11020 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11021 COMPOUND_EXPR in the chain will contain the tree for the simplified
11022 form of the builtin function call. */
11025 fold_builtin_strstr (tree s1
, tree s2
, tree type
)
11027 if (!validate_arg (s1
, POINTER_TYPE
)
11028 || !validate_arg (s2
, POINTER_TYPE
))
11033 const char *p1
, *p2
;
11035 p2
= c_getstr (s2
);
11039 p1
= c_getstr (s1
);
11042 const char *r
= strstr (p1
, p2
);
11046 return build_int_cst (TREE_TYPE (s1
), 0);
11048 /* Return an offset into the constant string argument. */
11049 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11050 s1
, size_int (r
- p1
));
11051 return fold_convert (type
, tem
);
11054 /* The argument is const char *, and the result is char *, so we need
11055 a type conversion here to avoid a warning. */
11057 return fold_convert (type
, s1
);
11062 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
11066 /* New argument list transforming strstr(s1, s2) to
11067 strchr(s1, s2[0]). */
11068 return build_call_expr (fn
, 2, s1
, build_int_cst (NULL_TREE
, p2
[0]));
11072 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11073 the call, and TYPE is its return type.
11075 Return NULL_TREE if no simplification was possible, otherwise return the
11076 simplified form of the call as a tree.
11078 The simplified form may be a constant or other expression which
11079 computes the same value, but in a more efficient manner (including
11080 calls to other builtin functions).
11082 The call may contain arguments which need to be evaluated, but
11083 which are not useful to determine the result of the call. In
11084 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11085 COMPOUND_EXPR will be an argument which must be evaluated.
11086 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11087 COMPOUND_EXPR in the chain will contain the tree for the simplified
11088 form of the builtin function call. */
11091 fold_builtin_strchr (tree s1
, tree s2
, tree type
)
11093 if (!validate_arg (s1
, POINTER_TYPE
)
11094 || !validate_arg (s2
, INTEGER_TYPE
))
11100 if (TREE_CODE (s2
) != INTEGER_CST
)
11103 p1
= c_getstr (s1
);
11110 if (target_char_cast (s2
, &c
))
11113 r
= strchr (p1
, c
);
11116 return build_int_cst (TREE_TYPE (s1
), 0);
11118 /* Return an offset into the constant string argument. */
11119 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11120 s1
, size_int (r
- p1
));
11121 return fold_convert (type
, tem
);
11127 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11128 the call, and TYPE is its return type.
11130 Return NULL_TREE if no simplification was possible, otherwise return the
11131 simplified form of the call as a tree.
11133 The simplified form may be a constant or other expression which
11134 computes the same value, but in a more efficient manner (including
11135 calls to other builtin functions).
11137 The call may contain arguments which need to be evaluated, but
11138 which are not useful to determine the result of the call. In
11139 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11140 COMPOUND_EXPR will be an argument which must be evaluated.
11141 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11142 COMPOUND_EXPR in the chain will contain the tree for the simplified
11143 form of the builtin function call. */
11146 fold_builtin_strrchr (tree s1
, tree s2
, tree type
)
11148 if (!validate_arg (s1
, POINTER_TYPE
)
11149 || !validate_arg (s2
, INTEGER_TYPE
))
11156 if (TREE_CODE (s2
) != INTEGER_CST
)
11159 p1
= c_getstr (s1
);
11166 if (target_char_cast (s2
, &c
))
11169 r
= strrchr (p1
, c
);
11172 return build_int_cst (TREE_TYPE (s1
), 0);
11174 /* Return an offset into the constant string argument. */
11175 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11176 s1
, size_int (r
- p1
));
11177 return fold_convert (type
, tem
);
11180 if (! integer_zerop (s2
))
11183 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
11187 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11188 return build_call_expr (fn
, 2, s1
, s2
);
11192 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11193 to the call, and TYPE is its return type.
11195 Return NULL_TREE if no simplification was possible, otherwise return the
11196 simplified form of the call as a tree.
11198 The simplified form may be a constant or other expression which
11199 computes the same value, but in a more efficient manner (including
11200 calls to other builtin functions).
11202 The call may contain arguments which need to be evaluated, but
11203 which are not useful to determine the result of the call. In
11204 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11205 COMPOUND_EXPR will be an argument which must be evaluated.
11206 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11207 COMPOUND_EXPR in the chain will contain the tree for the simplified
11208 form of the builtin function call. */
11211 fold_builtin_strpbrk (tree s1
, tree s2
, tree type
)
11213 if (!validate_arg (s1
, POINTER_TYPE
)
11214 || !validate_arg (s2
, POINTER_TYPE
))
11219 const char *p1
, *p2
;
11221 p2
= c_getstr (s2
);
11225 p1
= c_getstr (s1
);
11228 const char *r
= strpbrk (p1
, p2
);
11232 return build_int_cst (TREE_TYPE (s1
), 0);
11234 /* Return an offset into the constant string argument. */
11235 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11236 s1
, size_int (r
- p1
));
11237 return fold_convert (type
, tem
);
11241 /* strpbrk(x, "") == NULL.
11242 Evaluate and ignore s1 in case it had side-effects. */
11243 return omit_one_operand (TREE_TYPE (s1
), integer_zero_node
, s1
);
11246 return NULL_TREE
; /* Really call strpbrk. */
11248 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
11252 /* New argument list transforming strpbrk(s1, s2) to
11253 strchr(s1, s2[0]). */
11254 return build_call_expr (fn
, 2, s1
, build_int_cst (NULL_TREE
, p2
[0]));
11258 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11261 Return NULL_TREE if no simplification was possible, otherwise return the
11262 simplified form of the call as a tree.
11264 The simplified form may be a constant or other expression which
11265 computes the same value, but in a more efficient manner (including
11266 calls to other builtin functions).
11268 The call may contain arguments which need to be evaluated, but
11269 which are not useful to determine the result of the call. In
11270 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11271 COMPOUND_EXPR will be an argument which must be evaluated.
11272 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11273 COMPOUND_EXPR in the chain will contain the tree for the simplified
11274 form of the builtin function call. */
11277 fold_builtin_strcat (tree dst
, tree src
)
11279 if (!validate_arg (dst
, POINTER_TYPE
)
11280 || !validate_arg (src
, POINTER_TYPE
))
11284 const char *p
= c_getstr (src
);
11286 /* If the string length is zero, return the dst parameter. */
11287 if (p
&& *p
== '\0')
11294 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11295 arguments to the call.
11297 Return NULL_TREE if no simplification was possible, otherwise return the
11298 simplified form of the call as a tree.
11300 The simplified form may be a constant or other expression which
11301 computes the same value, but in a more efficient manner (including
11302 calls to other builtin functions).
11304 The call may contain arguments which need to be evaluated, but
11305 which are not useful to determine the result of the call. In
11306 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11307 COMPOUND_EXPR will be an argument which must be evaluated.
11308 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11309 COMPOUND_EXPR in the chain will contain the tree for the simplified
11310 form of the builtin function call. */
11313 fold_builtin_strncat (tree dst
, tree src
, tree len
)
11315 if (!validate_arg (dst
, POINTER_TYPE
)
11316 || !validate_arg (src
, POINTER_TYPE
)
11317 || !validate_arg (len
, INTEGER_TYPE
))
11321 const char *p
= c_getstr (src
);
11323 /* If the requested length is zero, or the src parameter string
11324 length is zero, return the dst parameter. */
11325 if (integer_zerop (len
) || (p
&& *p
== '\0'))
11326 return omit_two_operands (TREE_TYPE (dst
), dst
, src
, len
);
11328 /* If the requested len is greater than or equal to the string
11329 length, call strcat. */
11330 if (TREE_CODE (len
) == INTEGER_CST
&& p
11331 && compare_tree_int (len
, strlen (p
)) >= 0)
11333 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCAT
];
11335 /* If the replacement _DECL isn't initialized, don't do the
11340 return build_call_expr (fn
, 2, dst
, src
);
11346 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11349 Return NULL_TREE if no simplification was possible, otherwise return the
11350 simplified form of the call as a tree.
11352 The simplified form may be a constant or other expression which
11353 computes the same value, but in a more efficient manner (including
11354 calls to other builtin functions).
11356 The call may contain arguments which need to be evaluated, but
11357 which are not useful to determine the result of the call. In
11358 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11359 COMPOUND_EXPR will be an argument which must be evaluated.
11360 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11361 COMPOUND_EXPR in the chain will contain the tree for the simplified
11362 form of the builtin function call. */
11365 fold_builtin_strspn (tree s1
, tree s2
)
11367 if (!validate_arg (s1
, POINTER_TYPE
)
11368 || !validate_arg (s2
, POINTER_TYPE
))
11372 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11374 /* If both arguments are constants, evaluate at compile-time. */
11377 const size_t r
= strspn (p1
, p2
);
11378 return size_int (r
);
11381 /* If either argument is "", return NULL_TREE. */
11382 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
11383 /* Evaluate and ignore both arguments in case either one has
11385 return omit_two_operands (integer_type_node
, integer_zero_node
,
11391 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11394 Return NULL_TREE if no simplification was possible, otherwise return the
11395 simplified form of the call as a tree.
11397 The simplified form may be a constant or other expression which
11398 computes the same value, but in a more efficient manner (including
11399 calls to other builtin functions).
11401 The call may contain arguments which need to be evaluated, but
11402 which are not useful to determine the result of the call. In
11403 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11404 COMPOUND_EXPR will be an argument which must be evaluated.
11405 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11406 COMPOUND_EXPR in the chain will contain the tree for the simplified
11407 form of the builtin function call. */
11410 fold_builtin_strcspn (tree s1
, tree s2
)
11412 if (!validate_arg (s1
, POINTER_TYPE
)
11413 || !validate_arg (s2
, POINTER_TYPE
))
11417 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11419 /* If both arguments are constants, evaluate at compile-time. */
11422 const size_t r
= strcspn (p1
, p2
);
11423 return size_int (r
);
11426 /* If the first argument is "", return NULL_TREE. */
11427 if (p1
&& *p1
== '\0')
11429 /* Evaluate and ignore argument s2 in case it has
11431 return omit_one_operand (integer_type_node
,
11432 integer_zero_node
, s2
);
11435 /* If the second argument is "", return __builtin_strlen(s1). */
11436 if (p2
&& *p2
== '\0')
11438 tree fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
11440 /* If the replacement _DECL isn't initialized, don't do the
11445 return build_call_expr (fn
, 1, s1
);
11451 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11452 to the call. IGNORE is true if the value returned
11453 by the builtin will be ignored. UNLOCKED is true is true if this
11454 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11455 the known length of the string. Return NULL_TREE if no simplification
11459 fold_builtin_fputs (tree arg0
, tree arg1
, bool ignore
, bool unlocked
, tree len
)
11461 /* If we're using an unlocked function, assume the other unlocked
11462 functions exist explicitly. */
11463 tree
const fn_fputc
= unlocked
? built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
11464 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
11465 tree
const fn_fwrite
= unlocked
? built_in_decls
[BUILT_IN_FWRITE_UNLOCKED
]
11466 : implicit_built_in_decls
[BUILT_IN_FWRITE
];
11468 /* If the return value is used, don't do the transformation. */
11472 /* Verify the arguments in the original call. */
11473 if (!validate_arg (arg0
, POINTER_TYPE
)
11474 || !validate_arg (arg1
, POINTER_TYPE
))
11478 len
= c_strlen (arg0
, 0);
11480 /* Get the length of the string passed to fputs. If the length
11481 can't be determined, punt. */
11483 || TREE_CODE (len
) != INTEGER_CST
)
11486 switch (compare_tree_int (len
, 1))
11488 case -1: /* length is 0, delete the call entirely . */
11489 return omit_one_operand (integer_type_node
, integer_zero_node
, arg1
);;
11491 case 0: /* length is 1, call fputc. */
11493 const char *p
= c_getstr (arg0
);
11498 return build_call_expr (fn_fputc
, 2,
11499 build_int_cst (NULL_TREE
, p
[0]), arg1
);
11505 case 1: /* length is greater than 1, call fwrite. */
11507 /* If optimizing for size keep fputs. */
11510 /* New argument list transforming fputs(string, stream) to
11511 fwrite(string, 1, len, stream). */
11513 return build_call_expr (fn_fwrite
, 4, arg0
, size_one_node
, len
, arg1
);
11518 gcc_unreachable ();
11523 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11524 produced. False otherwise. This is done so that we don't output the error
11525 or warning twice or three times. */
11528 fold_builtin_next_arg (tree exp
, bool va_start_p
)
11530 tree fntype
= TREE_TYPE (current_function_decl
);
11531 int nargs
= call_expr_nargs (exp
);
11534 if (TYPE_ARG_TYPES (fntype
) == 0
11535 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
11536 == void_type_node
))
11538 error ("%<va_start%> used in function with fixed args");
11544 if (va_start_p
&& (nargs
!= 2))
11546 error ("wrong number of arguments to function %<va_start%>");
11549 arg
= CALL_EXPR_ARG (exp
, 1);
11551 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11552 when we checked the arguments and if needed issued a warning. */
11557 /* Evidently an out of date version of <stdarg.h>; can't validate
11558 va_start's second argument, but can still work as intended. */
11559 warning (0, "%<__builtin_next_arg%> called without an argument");
11562 else if (nargs
> 1)
11564 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11567 arg
= CALL_EXPR_ARG (exp
, 0);
11570 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11571 or __builtin_next_arg (0) the first time we see it, after checking
11572 the arguments and if needed issuing a warning. */
11573 if (!integer_zerop (arg
))
11575 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
11577 /* Strip off all nops for the sake of the comparison. This
11578 is not quite the same as STRIP_NOPS. It does more.
11579 We must also strip off INDIRECT_EXPR for C++ reference
11581 while (CONVERT_EXPR_P (arg
)
11582 || TREE_CODE (arg
) == INDIRECT_REF
)
11583 arg
= TREE_OPERAND (arg
, 0);
11584 if (arg
!= last_parm
)
11586 /* FIXME: Sometimes with the tree optimizers we can get the
11587 not the last argument even though the user used the last
11588 argument. We just warn and set the arg to be the last
11589 argument so that we will get wrong-code because of
11591 warning (0, "second parameter of %<va_start%> not last named argument");
11593 /* We want to verify the second parameter just once before the tree
11594 optimizers are run and then avoid keeping it in the tree,
11595 as otherwise we could warn even for correct code like:
11596 void foo (int i, ...)
11597 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11599 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
11601 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
11607 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11608 ORIG may be null if this is a 2-argument call. We don't attempt to
11609 simplify calls with more than 3 arguments.
11611 Return NULL_TREE if no simplification was possible, otherwise return the
11612 simplified form of the call as a tree. If IGNORED is true, it means that
11613 the caller does not use the returned value of the function. */
11616 fold_builtin_sprintf (tree dest
, tree fmt
, tree orig
, int ignored
)
11619 const char *fmt_str
= NULL
;
11621 /* Verify the required arguments in the original call. We deal with two
11622 types of sprintf() calls: 'sprintf (str, fmt)' and
11623 'sprintf (dest, "%s", orig)'. */
11624 if (!validate_arg (dest
, POINTER_TYPE
)
11625 || !validate_arg (fmt
, POINTER_TYPE
))
11627 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
11630 /* Check whether the format is a literal string constant. */
11631 fmt_str
= c_getstr (fmt
);
11632 if (fmt_str
== NULL
)
11636 retval
= NULL_TREE
;
11638 if (!init_target_chars ())
11641 /* If the format doesn't contain % args or %%, use strcpy. */
11642 if (strchr (fmt_str
, target_percent
) == NULL
)
11644 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11649 /* Don't optimize sprintf (buf, "abc", ptr++). */
11653 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11654 'format' is known to contain no % formats. */
11655 call
= build_call_expr (fn
, 2, dest
, fmt
);
11657 retval
= build_int_cst (NULL_TREE
, strlen (fmt_str
));
11660 /* If the format is "%s", use strcpy if the result isn't used. */
11661 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
11664 fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11669 /* Don't crash on sprintf (str1, "%s"). */
11673 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11676 retval
= c_strlen (orig
, 1);
11677 if (!retval
|| TREE_CODE (retval
) != INTEGER_CST
)
11680 call
= build_call_expr (fn
, 2, dest
, orig
);
11683 if (call
&& retval
)
11685 retval
= fold_convert
11686 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls
[BUILT_IN_SPRINTF
])),
11688 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
11694 /* Expand a call EXP to __builtin_object_size. */
11697 expand_builtin_object_size (tree exp
)
11700 int object_size_type
;
11701 tree fndecl
= get_callee_fndecl (exp
);
11703 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11705 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11707 expand_builtin_trap ();
11711 ost
= CALL_EXPR_ARG (exp
, 1);
11714 if (TREE_CODE (ost
) != INTEGER_CST
11715 || tree_int_cst_sgn (ost
) < 0
11716 || compare_tree_int (ost
, 3) > 0)
11718 error ("%Klast argument of %D is not integer constant between 0 and 3",
11720 expand_builtin_trap ();
11724 object_size_type
= tree_low_cst (ost
, 0);
11726 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
11729 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11730 FCODE is the BUILT_IN_* to use.
11731 Return NULL_RTX if we failed; the caller should emit a normal call,
11732 otherwise try to get the result in TARGET, if convenient (and in
11733 mode MODE if that's convenient). */
11736 expand_builtin_memory_chk (tree exp
, rtx target
, enum machine_mode mode
,
11737 enum built_in_function fcode
)
11739 tree dest
, src
, len
, size
;
11741 if (!validate_arglist (exp
,
11743 fcode
== BUILT_IN_MEMSET_CHK
11744 ? INTEGER_TYPE
: POINTER_TYPE
,
11745 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11748 dest
= CALL_EXPR_ARG (exp
, 0);
11749 src
= CALL_EXPR_ARG (exp
, 1);
11750 len
= CALL_EXPR_ARG (exp
, 2);
11751 size
= CALL_EXPR_ARG (exp
, 3);
11753 if (! host_integerp (size
, 1))
11756 if (host_integerp (len
, 1) || integer_all_onesp (size
))
11760 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
11762 warning (0, "%Kcall to %D will always overflow destination buffer",
11763 exp
, get_callee_fndecl (exp
));
11768 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11769 mem{cpy,pcpy,move,set} is available. */
11772 case BUILT_IN_MEMCPY_CHK
:
11773 fn
= built_in_decls
[BUILT_IN_MEMCPY
];
11775 case BUILT_IN_MEMPCPY_CHK
:
11776 fn
= built_in_decls
[BUILT_IN_MEMPCPY
];
11778 case BUILT_IN_MEMMOVE_CHK
:
11779 fn
= built_in_decls
[BUILT_IN_MEMMOVE
];
11781 case BUILT_IN_MEMSET_CHK
:
11782 fn
= built_in_decls
[BUILT_IN_MEMSET
];
11791 fn
= build_call_expr (fn
, 3, dest
, src
, len
);
11792 STRIP_TYPE_NOPS (fn
);
11793 while (TREE_CODE (fn
) == COMPOUND_EXPR
)
11795 expand_expr (TREE_OPERAND (fn
, 0), const0_rtx
, VOIDmode
,
11797 fn
= TREE_OPERAND (fn
, 1);
11799 if (TREE_CODE (fn
) == CALL_EXPR
)
11800 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11801 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11803 else if (fcode
== BUILT_IN_MEMSET_CHK
)
11807 unsigned int dest_align
11808 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
11810 /* If DEST is not a pointer type, call the normal function. */
11811 if (dest_align
== 0)
11814 /* If SRC and DEST are the same (and not volatile), do nothing. */
11815 if (operand_equal_p (src
, dest
, 0))
11819 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
11821 /* Evaluate and ignore LEN in case it has side-effects. */
11822 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
11823 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
11826 expr
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
11827 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
11830 /* __memmove_chk special case. */
11831 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
11833 unsigned int src_align
11834 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
11836 if (src_align
== 0)
11839 /* If src is categorized for a readonly section we can use
11840 normal __memcpy_chk. */
11841 if (readonly_data_expr (src
))
11843 tree fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
11846 fn
= build_call_expr (fn
, 4, dest
, src
, len
, size
);
11847 STRIP_TYPE_NOPS (fn
);
11848 while (TREE_CODE (fn
) == COMPOUND_EXPR
)
11850 expand_expr (TREE_OPERAND (fn
, 0), const0_rtx
, VOIDmode
,
11852 fn
= TREE_OPERAND (fn
, 1);
11854 if (TREE_CODE (fn
) == CALL_EXPR
)
11855 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11856 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11863 /* Emit warning if a buffer overflow is detected at compile time. */
11866 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
11873 case BUILT_IN_STRCPY_CHK
:
11874 case BUILT_IN_STPCPY_CHK
:
11875 /* For __strcat_chk the warning will be emitted only if overflowing
11876 by at least strlen (dest) + 1 bytes. */
11877 case BUILT_IN_STRCAT_CHK
:
11878 len
= CALL_EXPR_ARG (exp
, 1);
11879 size
= CALL_EXPR_ARG (exp
, 2);
11882 case BUILT_IN_STRNCAT_CHK
:
11883 case BUILT_IN_STRNCPY_CHK
:
11884 len
= CALL_EXPR_ARG (exp
, 2);
11885 size
= CALL_EXPR_ARG (exp
, 3);
11887 case BUILT_IN_SNPRINTF_CHK
:
11888 case BUILT_IN_VSNPRINTF_CHK
:
11889 len
= CALL_EXPR_ARG (exp
, 1);
11890 size
= CALL_EXPR_ARG (exp
, 3);
11893 gcc_unreachable ();
11899 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
11904 len
= c_strlen (len
, 1);
11905 if (! len
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
11908 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
11910 tree src
= CALL_EXPR_ARG (exp
, 1);
11911 if (! src
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
11913 src
= c_strlen (src
, 1);
11914 if (! src
|| ! host_integerp (src
, 1))
11916 warning (0, "%Kcall to %D might overflow destination buffer",
11917 exp
, get_callee_fndecl (exp
));
11920 else if (tree_int_cst_lt (src
, size
))
11923 else if (! host_integerp (len
, 1) || ! tree_int_cst_lt (size
, len
))
11926 warning (0, "%Kcall to %D will always overflow destination buffer",
11927 exp
, get_callee_fndecl (exp
));
11930 /* Emit warning if a buffer overflow is detected at compile time
11931 in __sprintf_chk/__vsprintf_chk calls. */
11934 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
11936 tree dest
, size
, len
, fmt
, flag
;
11937 const char *fmt_str
;
11938 int nargs
= call_expr_nargs (exp
);
11940 /* Verify the required arguments in the original call. */
11944 dest
= CALL_EXPR_ARG (exp
, 0);
11945 flag
= CALL_EXPR_ARG (exp
, 1);
11946 size
= CALL_EXPR_ARG (exp
, 2);
11947 fmt
= CALL_EXPR_ARG (exp
, 3);
11949 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
11952 /* Check whether the format is a literal string constant. */
11953 fmt_str
= c_getstr (fmt
);
11954 if (fmt_str
== NULL
)
11957 if (!init_target_chars ())
11960 /* If the format doesn't contain % args or %%, we know its size. */
11961 if (strchr (fmt_str
, target_percent
) == 0)
11962 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
11963 /* If the format is "%s" and first ... argument is a string literal,
11965 else if (fcode
== BUILT_IN_SPRINTF_CHK
11966 && strcmp (fmt_str
, target_percent_s
) == 0)
11972 arg
= CALL_EXPR_ARG (exp
, 4);
11973 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
11976 len
= c_strlen (arg
, 1);
11977 if (!len
|| ! host_integerp (len
, 1))
11983 if (! tree_int_cst_lt (len
, size
))
11985 warning (0, "%Kcall to %D will always overflow destination buffer",
11986 exp
, get_callee_fndecl (exp
));
11990 /* Emit warning if a free is called with address of a variable. */
11993 maybe_emit_free_warning (tree exp
)
11995 tree arg
= CALL_EXPR_ARG (exp
, 0);
11998 if (TREE_CODE (arg
) != ADDR_EXPR
)
12001 arg
= get_base_address (TREE_OPERAND (arg
, 0));
12002 if (arg
== NULL
|| INDIRECT_REF_P (arg
))
12005 if (SSA_VAR_P (arg
))
12006 warning (0, "%Kattempt to free a non-heap object %qD", exp
, arg
);
12008 warning (0, "%Kattempt to free a non-heap object", exp
);
12011 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12015 fold_builtin_object_size (tree ptr
, tree ost
)
12017 tree ret
= NULL_TREE
;
12018 int object_size_type
;
12020 if (!validate_arg (ptr
, POINTER_TYPE
)
12021 || !validate_arg (ost
, INTEGER_TYPE
))
12026 if (TREE_CODE (ost
) != INTEGER_CST
12027 || tree_int_cst_sgn (ost
) < 0
12028 || compare_tree_int (ost
, 3) > 0)
12031 object_size_type
= tree_low_cst (ost
, 0);
12033 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12034 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12035 and (size_t) 0 for types 2 and 3. */
12036 if (TREE_SIDE_EFFECTS (ptr
))
12037 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
12039 if (TREE_CODE (ptr
) == ADDR_EXPR
)
12040 ret
= build_int_cstu (size_type_node
,
12041 compute_builtin_object_size (ptr
, object_size_type
));
12043 else if (TREE_CODE (ptr
) == SSA_NAME
)
12045 unsigned HOST_WIDE_INT bytes
;
12047 /* If object size is not known yet, delay folding until
12048 later. Maybe subsequent passes will help determining
12050 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12051 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2
12053 ret
= build_int_cstu (size_type_node
, bytes
);
12058 unsigned HOST_WIDE_INT low
= TREE_INT_CST_LOW (ret
);
12059 HOST_WIDE_INT high
= TREE_INT_CST_HIGH (ret
);
12060 if (fit_double_type (low
, high
, &low
, &high
, TREE_TYPE (ret
)))
12067 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12068 DEST, SRC, LEN, and SIZE are the arguments to the call.
12069 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12070 code of the builtin. If MAXLEN is not NULL, it is maximum length
12071 passed as third argument. */
12074 fold_builtin_memory_chk (tree fndecl
,
12075 tree dest
, tree src
, tree len
, tree size
,
12076 tree maxlen
, bool ignore
,
12077 enum built_in_function fcode
)
12081 if (!validate_arg (dest
, POINTER_TYPE
)
12082 || !validate_arg (src
,
12083 (fcode
== BUILT_IN_MEMSET_CHK
12084 ? INTEGER_TYPE
: POINTER_TYPE
))
12085 || !validate_arg (len
, INTEGER_TYPE
)
12086 || !validate_arg (size
, INTEGER_TYPE
))
12089 /* If SRC and DEST are the same (and not volatile), return DEST
12090 (resp. DEST+LEN for __mempcpy_chk). */
12091 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
12093 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12094 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
12097 tree temp
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
12098 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), temp
);
12102 if (! host_integerp (size
, 1))
12105 if (! integer_all_onesp (size
))
12107 if (! host_integerp (len
, 1))
12109 /* If LEN is not constant, try MAXLEN too.
12110 For MAXLEN only allow optimizing into non-_ocs function
12111 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12112 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12114 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
12116 /* (void) __mempcpy_chk () can be optimized into
12117 (void) __memcpy_chk (). */
12118 fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
12122 return build_call_expr (fn
, 4, dest
, src
, len
, size
);
12130 if (tree_int_cst_lt (size
, maxlen
))
12135 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12136 mem{cpy,pcpy,move,set} is available. */
12139 case BUILT_IN_MEMCPY_CHK
:
12140 fn
= built_in_decls
[BUILT_IN_MEMCPY
];
12142 case BUILT_IN_MEMPCPY_CHK
:
12143 fn
= built_in_decls
[BUILT_IN_MEMPCPY
];
12145 case BUILT_IN_MEMMOVE_CHK
:
12146 fn
= built_in_decls
[BUILT_IN_MEMMOVE
];
12148 case BUILT_IN_MEMSET_CHK
:
12149 fn
= built_in_decls
[BUILT_IN_MEMSET
];
12158 return build_call_expr (fn
, 3, dest
, src
, len
);
12161 /* Fold a call to the __st[rp]cpy_chk builtin.
12162 DEST, SRC, and SIZE are the arguments to the call.
12163 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12164 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12165 strings passed as second argument. */
12168 fold_builtin_stxcpy_chk (tree fndecl
, tree dest
, tree src
, tree size
,
12169 tree maxlen
, bool ignore
,
12170 enum built_in_function fcode
)
12174 if (!validate_arg (dest
, POINTER_TYPE
)
12175 || !validate_arg (src
, POINTER_TYPE
)
12176 || !validate_arg (size
, INTEGER_TYPE
))
12179 /* If SRC and DEST are the same (and not volatile), return DEST. */
12180 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
12181 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
12183 if (! host_integerp (size
, 1))
12186 if (! integer_all_onesp (size
))
12188 len
= c_strlen (src
, 1);
12189 if (! len
|| ! host_integerp (len
, 1))
12191 /* If LEN is not constant, try MAXLEN too.
12192 For MAXLEN only allow optimizing into non-_ocs function
12193 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12194 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12196 if (fcode
== BUILT_IN_STPCPY_CHK
)
12201 /* If return value of __stpcpy_chk is ignored,
12202 optimize into __strcpy_chk. */
12203 fn
= built_in_decls
[BUILT_IN_STRCPY_CHK
];
12207 return build_call_expr (fn
, 3, dest
, src
, size
);
12210 if (! len
|| TREE_SIDE_EFFECTS (len
))
12213 /* If c_strlen returned something, but not a constant,
12214 transform __strcpy_chk into __memcpy_chk. */
12215 fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
12219 len
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
12220 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
12221 build_call_expr (fn
, 4,
12222 dest
, src
, len
, size
));
12228 if (! tree_int_cst_lt (maxlen
, size
))
12232 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12233 fn
= built_in_decls
[fcode
== BUILT_IN_STPCPY_CHK
12234 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
];
12238 return build_call_expr (fn
, 2, dest
, src
);
12241 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12242 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12243 length passed as third argument. */
12246 fold_builtin_strncpy_chk (tree dest
, tree src
, tree len
, tree size
,
12251 if (!validate_arg (dest
, POINTER_TYPE
)
12252 || !validate_arg (src
, POINTER_TYPE
)
12253 || !validate_arg (len
, INTEGER_TYPE
)
12254 || !validate_arg (size
, INTEGER_TYPE
))
12257 if (! host_integerp (size
, 1))
12260 if (! integer_all_onesp (size
))
12262 if (! host_integerp (len
, 1))
12264 /* If LEN is not constant, try MAXLEN too.
12265 For MAXLEN only allow optimizing into non-_ocs function
12266 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12267 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12273 if (tree_int_cst_lt (size
, maxlen
))
12277 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12278 fn
= built_in_decls
[BUILT_IN_STRNCPY
];
12282 return build_call_expr (fn
, 3, dest
, src
, len
);
12285 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12286 are the arguments to the call. */
12289 fold_builtin_strcat_chk (tree fndecl
, tree dest
, tree src
, tree size
)
12294 if (!validate_arg (dest
, POINTER_TYPE
)
12295 || !validate_arg (src
, POINTER_TYPE
)
12296 || !validate_arg (size
, INTEGER_TYPE
))
12299 p
= c_getstr (src
);
12300 /* If the SRC parameter is "", return DEST. */
12301 if (p
&& *p
== '\0')
12302 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
12304 if (! host_integerp (size
, 1) || ! integer_all_onesp (size
))
12307 /* If __builtin_strcat_chk is used, assume strcat is available. */
12308 fn
= built_in_decls
[BUILT_IN_STRCAT
];
12312 return build_call_expr (fn
, 2, dest
, src
);
12315 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12319 fold_builtin_strncat_chk (tree fndecl
,
12320 tree dest
, tree src
, tree len
, tree size
)
12325 if (!validate_arg (dest
, POINTER_TYPE
)
12326 || !validate_arg (src
, POINTER_TYPE
)
12327 || !validate_arg (size
, INTEGER_TYPE
)
12328 || !validate_arg (size
, INTEGER_TYPE
))
12331 p
= c_getstr (src
);
12332 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12333 if (p
&& *p
== '\0')
12334 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
12335 else if (integer_zerop (len
))
12336 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
12338 if (! host_integerp (size
, 1))
12341 if (! integer_all_onesp (size
))
12343 tree src_len
= c_strlen (src
, 1);
12345 && host_integerp (src_len
, 1)
12346 && host_integerp (len
, 1)
12347 && ! tree_int_cst_lt (len
, src_len
))
12349 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12350 fn
= built_in_decls
[BUILT_IN_STRCAT_CHK
];
12354 return build_call_expr (fn
, 3, dest
, src
, size
);
12359 /* If __builtin_strncat_chk is used, assume strncat is available. */
12360 fn
= built_in_decls
[BUILT_IN_STRNCAT
];
12364 return build_call_expr (fn
, 3, dest
, src
, len
);
12367 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12368 a normal call should be emitted rather than expanding the function
12369 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12372 fold_builtin_sprintf_chk (tree exp
, enum built_in_function fcode
)
12374 tree dest
, size
, len
, fn
, fmt
, flag
;
12375 const char *fmt_str
;
12376 int nargs
= call_expr_nargs (exp
);
12378 /* Verify the required arguments in the original call. */
12381 dest
= CALL_EXPR_ARG (exp
, 0);
12382 if (!validate_arg (dest
, POINTER_TYPE
))
12384 flag
= CALL_EXPR_ARG (exp
, 1);
12385 if (!validate_arg (flag
, INTEGER_TYPE
))
12387 size
= CALL_EXPR_ARG (exp
, 2);
12388 if (!validate_arg (size
, INTEGER_TYPE
))
12390 fmt
= CALL_EXPR_ARG (exp
, 3);
12391 if (!validate_arg (fmt
, POINTER_TYPE
))
12394 if (! host_integerp (size
, 1))
12399 if (!init_target_chars ())
12402 /* Check whether the format is a literal string constant. */
12403 fmt_str
= c_getstr (fmt
);
12404 if (fmt_str
!= NULL
)
12406 /* If the format doesn't contain % args or %%, we know the size. */
12407 if (strchr (fmt_str
, target_percent
) == 0)
12409 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
12410 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
12412 /* If the format is "%s" and first ... argument is a string literal,
12413 we know the size too. */
12414 else if (fcode
== BUILT_IN_SPRINTF_CHK
12415 && strcmp (fmt_str
, target_percent_s
) == 0)
12421 arg
= CALL_EXPR_ARG (exp
, 4);
12422 if (validate_arg (arg
, POINTER_TYPE
))
12424 len
= c_strlen (arg
, 1);
12425 if (! len
|| ! host_integerp (len
, 1))
12432 if (! integer_all_onesp (size
))
12434 if (! len
|| ! tree_int_cst_lt (len
, size
))
12438 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12439 or if format doesn't contain % chars or is "%s". */
12440 if (! integer_zerop (flag
))
12442 if (fmt_str
== NULL
)
12444 if (strchr (fmt_str
, target_percent
) != NULL
12445 && strcmp (fmt_str
, target_percent_s
))
12449 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12450 fn
= built_in_decls
[fcode
== BUILT_IN_VSPRINTF_CHK
12451 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
];
12455 return rewrite_call_expr (exp
, 4, fn
, 2, dest
, fmt
);
12458 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12459 a normal call should be emitted rather than expanding the function
12460 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12461 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12462 passed as second argument. */
12465 fold_builtin_snprintf_chk (tree exp
, tree maxlen
,
12466 enum built_in_function fcode
)
12468 tree dest
, size
, len
, fn
, fmt
, flag
;
12469 const char *fmt_str
;
12471 /* Verify the required arguments in the original call. */
12472 if (call_expr_nargs (exp
) < 5)
12474 dest
= CALL_EXPR_ARG (exp
, 0);
12475 if (!validate_arg (dest
, POINTER_TYPE
))
12477 len
= CALL_EXPR_ARG (exp
, 1);
12478 if (!validate_arg (len
, INTEGER_TYPE
))
12480 flag
= CALL_EXPR_ARG (exp
, 2);
12481 if (!validate_arg (flag
, INTEGER_TYPE
))
12483 size
= CALL_EXPR_ARG (exp
, 3);
12484 if (!validate_arg (size
, INTEGER_TYPE
))
12486 fmt
= CALL_EXPR_ARG (exp
, 4);
12487 if (!validate_arg (fmt
, POINTER_TYPE
))
12490 if (! host_integerp (size
, 1))
12493 if (! integer_all_onesp (size
))
12495 if (! host_integerp (len
, 1))
12497 /* If LEN is not constant, try MAXLEN too.
12498 For MAXLEN only allow optimizing into non-_ocs function
12499 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12500 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12506 if (tree_int_cst_lt (size
, maxlen
))
12510 if (!init_target_chars ())
12513 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12514 or if format doesn't contain % chars or is "%s". */
12515 if (! integer_zerop (flag
))
12517 fmt_str
= c_getstr (fmt
);
12518 if (fmt_str
== NULL
)
12520 if (strchr (fmt_str
, target_percent
) != NULL
12521 && strcmp (fmt_str
, target_percent_s
))
12525 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12527 fn
= built_in_decls
[fcode
== BUILT_IN_VSNPRINTF_CHK
12528 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
];
12532 return rewrite_call_expr (exp
, 5, fn
, 3, dest
, len
, fmt
);
12535 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12536 FMT and ARG are the arguments to the call; we don't fold cases with
12537 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12539 Return NULL_TREE if no simplification was possible, otherwise return the
12540 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12541 code of the function to be simplified. */
12544 fold_builtin_printf (tree fndecl
, tree fmt
, tree arg
, bool ignore
,
12545 enum built_in_function fcode
)
12547 tree fn_putchar
, fn_puts
, newarg
, call
= NULL_TREE
;
12548 const char *fmt_str
= NULL
;
12550 /* If the return value is used, don't do the transformation. */
12554 /* Verify the required arguments in the original call. */
12555 if (!validate_arg (fmt
, POINTER_TYPE
))
12558 /* Check whether the format is a literal string constant. */
12559 fmt_str
= c_getstr (fmt
);
12560 if (fmt_str
== NULL
)
12563 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
12565 /* If we're using an unlocked function, assume the other
12566 unlocked functions exist explicitly. */
12567 fn_putchar
= built_in_decls
[BUILT_IN_PUTCHAR_UNLOCKED
];
12568 fn_puts
= built_in_decls
[BUILT_IN_PUTS_UNLOCKED
];
12572 fn_putchar
= implicit_built_in_decls
[BUILT_IN_PUTCHAR
];
12573 fn_puts
= implicit_built_in_decls
[BUILT_IN_PUTS
];
12576 if (!init_target_chars ())
12579 if (strcmp (fmt_str
, target_percent_s
) == 0
12580 || strchr (fmt_str
, target_percent
) == NULL
)
12584 if (strcmp (fmt_str
, target_percent_s
) == 0)
12586 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
12589 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12592 str
= c_getstr (arg
);
12598 /* The format specifier doesn't contain any '%' characters. */
12599 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
12605 /* If the string was "", printf does nothing. */
12606 if (str
[0] == '\0')
12607 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
12609 /* If the string has length of 1, call putchar. */
12610 if (str
[1] == '\0')
12612 /* Given printf("c"), (where c is any one character,)
12613 convert "c"[0] to an int and pass that to the replacement
12615 newarg
= build_int_cst (NULL_TREE
, str
[0]);
12617 call
= build_call_expr (fn_putchar
, 1, newarg
);
12621 /* If the string was "string\n", call puts("string"). */
12622 size_t len
= strlen (str
);
12623 if ((unsigned char)str
[len
- 1] == target_newline
)
12625 /* Create a NUL-terminated string that's one char shorter
12626 than the original, stripping off the trailing '\n'. */
12627 char *newstr
= XALLOCAVEC (char, len
);
12628 memcpy (newstr
, str
, len
- 1);
12629 newstr
[len
- 1] = 0;
12631 newarg
= build_string_literal (len
, newstr
);
12633 call
= build_call_expr (fn_puts
, 1, newarg
);
12636 /* We'd like to arrange to call fputs(string,stdout) here,
12637 but we need stdout and don't have a way to get it yet. */
12642 /* The other optimizations can be done only on the non-va_list variants. */
12643 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
12646 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12647 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
12649 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12652 call
= build_call_expr (fn_puts
, 1, arg
);
12655 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12656 else if (strcmp (fmt_str
, target_percent_c
) == 0)
12658 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
12661 call
= build_call_expr (fn_putchar
, 1, arg
);
12667 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), call
);
12670 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12671 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12672 more than 3 arguments, and ARG may be null in the 2-argument case.
12674 Return NULL_TREE if no simplification was possible, otherwise return the
12675 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12676 code of the function to be simplified. */
12679 fold_builtin_fprintf (tree fndecl
, tree fp
, tree fmt
, tree arg
, bool ignore
,
12680 enum built_in_function fcode
)
12682 tree fn_fputc
, fn_fputs
, call
= NULL_TREE
;
12683 const char *fmt_str
= NULL
;
12685 /* If the return value is used, don't do the transformation. */
12689 /* Verify the required arguments in the original call. */
12690 if (!validate_arg (fp
, POINTER_TYPE
))
12692 if (!validate_arg (fmt
, POINTER_TYPE
))
12695 /* Check whether the format is a literal string constant. */
12696 fmt_str
= c_getstr (fmt
);
12697 if (fmt_str
== NULL
)
12700 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
12702 /* If we're using an unlocked function, assume the other
12703 unlocked functions exist explicitly. */
12704 fn_fputc
= built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
];
12705 fn_fputs
= built_in_decls
[BUILT_IN_FPUTS_UNLOCKED
];
12709 fn_fputc
= implicit_built_in_decls
[BUILT_IN_FPUTC
];
12710 fn_fputs
= implicit_built_in_decls
[BUILT_IN_FPUTS
];
12713 if (!init_target_chars ())
12716 /* If the format doesn't contain % args or %%, use strcpy. */
12717 if (strchr (fmt_str
, target_percent
) == NULL
)
12719 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
12723 /* If the format specifier was "", fprintf does nothing. */
12724 if (fmt_str
[0] == '\0')
12726 /* If FP has side-effects, just wait until gimplification is
12728 if (TREE_SIDE_EFFECTS (fp
))
12731 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
12734 /* When "string" doesn't contain %, replace all cases of
12735 fprintf (fp, string) with fputs (string, fp). The fputs
12736 builtin will take care of special cases like length == 1. */
12738 call
= build_call_expr (fn_fputs
, 2, fmt
, fp
);
12741 /* The other optimizations can be done only on the non-va_list variants. */
12742 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
12745 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12746 else if (strcmp (fmt_str
, target_percent_s
) == 0)
12748 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12751 call
= build_call_expr (fn_fputs
, 2, arg
, fp
);
12754 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12755 else if (strcmp (fmt_str
, target_percent_c
) == 0)
12757 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
12760 call
= build_call_expr (fn_fputc
, 2, arg
, fp
);
12765 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), call
);
12768 /* Initialize format string characters in the target charset. */
12771 init_target_chars (void)
12776 target_newline
= lang_hooks
.to_target_charset ('\n');
12777 target_percent
= lang_hooks
.to_target_charset ('%');
12778 target_c
= lang_hooks
.to_target_charset ('c');
12779 target_s
= lang_hooks
.to_target_charset ('s');
12780 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
12784 target_percent_c
[0] = target_percent
;
12785 target_percent_c
[1] = target_c
;
12786 target_percent_c
[2] = '\0';
12788 target_percent_s
[0] = target_percent
;
12789 target_percent_s
[1] = target_s
;
12790 target_percent_s
[2] = '\0';
12792 target_percent_s_newline
[0] = target_percent
;
12793 target_percent_s_newline
[1] = target_s
;
12794 target_percent_s_newline
[2] = target_newline
;
12795 target_percent_s_newline
[3] = '\0';
12802 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12803 and no overflow/underflow occurred. INEXACT is true if M was not
12804 exactly calculated. TYPE is the tree type for the result. This
12805 function assumes that you cleared the MPFR flags and then
12806 calculated M to see if anything subsequently set a flag prior to
12807 entering this function. Return NULL_TREE if any checks fail. */
12810 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
12812 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12813 overflow/underflow occurred. If -frounding-math, proceed iff the
12814 result of calling FUNC was exact. */
12815 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12816 && (!flag_rounding_math
|| !inexact
))
12818 REAL_VALUE_TYPE rr
;
12820 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
12821 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12822 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12823 but the mpft_t is not, then we underflowed in the
12825 if (real_isfinite (&rr
)
12826 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
12828 REAL_VALUE_TYPE rmode
;
12830 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
12831 /* Proceed iff the specified mode can hold the value. */
12832 if (real_identical (&rmode
, &rr
))
12833 return build_real (type
, rmode
);
12839 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12840 FUNC on it and return the resulting value as a tree with type TYPE.
12841 If MIN and/or MAX are not NULL, then the supplied ARG must be
12842 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12843 acceptable values, otherwise they are not. The mpfr precision is
12844 set to the precision of TYPE. We assume that function FUNC returns
12845 zero if the result could be calculated exactly within the requested
12849 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
12850 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
12853 tree result
= NULL_TREE
;
12857 /* To proceed, MPFR must exactly represent the target floating point
12858 format, which only happens when the target base equals two. */
12859 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12860 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
12862 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
12864 if (real_isfinite (ra
)
12865 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
12866 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
12868 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12872 mpfr_init2 (m
, prec
);
12873 mpfr_from_real (m
, ra
, GMP_RNDN
);
12874 mpfr_clear_flags ();
12875 inexact
= func (m
, m
, GMP_RNDN
);
12876 result
= do_mpfr_ckconv (m
, type
, inexact
);
12884 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12885 FUNC on it and return the resulting value as a tree with type TYPE.
12886 The mpfr precision is set to the precision of TYPE. We assume that
12887 function FUNC returns zero if the result could be calculated
12888 exactly within the requested precision. */
12891 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
12892 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
12894 tree result
= NULL_TREE
;
12899 /* To proceed, MPFR must exactly represent the target floating point
12900 format, which only happens when the target base equals two. */
12901 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12902 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
12903 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
12905 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
12906 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
12908 if (real_isfinite (ra1
) && real_isfinite (ra2
))
12910 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12914 mpfr_inits2 (prec
, m1
, m2
, NULL
);
12915 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
12916 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
12917 mpfr_clear_flags ();
12918 inexact
= func (m1
, m1
, m2
, GMP_RNDN
);
12919 result
= do_mpfr_ckconv (m1
, type
, inexact
);
12920 mpfr_clears (m1
, m2
, NULL
);
12927 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12928 FUNC on it and return the resulting value as a tree with type TYPE.
12929 The mpfr precision is set to the precision of TYPE. We assume that
12930 function FUNC returns zero if the result could be calculated
12931 exactly within the requested precision. */
12934 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
12935 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
12937 tree result
= NULL_TREE
;
12943 /* To proceed, MPFR must exactly represent the target floating point
12944 format, which only happens when the target base equals two. */
12945 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12946 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
12947 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
12948 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
12950 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
12951 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
12952 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
12954 if (real_isfinite (ra1
) && real_isfinite (ra2
) && real_isfinite (ra3
))
12956 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12960 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
12961 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
12962 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
12963 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
12964 mpfr_clear_flags ();
12965 inexact
= func (m1
, m1
, m2
, m3
, GMP_RNDN
);
12966 result
= do_mpfr_ckconv (m1
, type
, inexact
);
12967 mpfr_clears (m1
, m2
, m3
, NULL
);
12974 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12975 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12976 If ARG_SINP and ARG_COSP are NULL then the result is returned
12977 as a complex value.
12978 The type is taken from the type of ARG and is used for setting the
12979 precision of the calculation and results. */
12982 do_mpfr_sincos (tree arg
, tree arg_sinp
, tree arg_cosp
)
12984 tree
const type
= TREE_TYPE (arg
);
12985 tree result
= NULL_TREE
;
12989 /* To proceed, MPFR must exactly represent the target floating point
12990 format, which only happens when the target base equals two. */
12991 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12992 && TREE_CODE (arg
) == REAL_CST
12993 && !TREE_OVERFLOW (arg
))
12995 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
12997 if (real_isfinite (ra
))
12999 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
13000 tree result_s
, result_c
;
13004 mpfr_inits2 (prec
, m
, ms
, mc
, NULL
);
13005 mpfr_from_real (m
, ra
, GMP_RNDN
);
13006 mpfr_clear_flags ();
13007 inexact
= mpfr_sin_cos (ms
, mc
, m
, GMP_RNDN
);
13008 result_s
= do_mpfr_ckconv (ms
, type
, inexact
);
13009 result_c
= do_mpfr_ckconv (mc
, type
, inexact
);
13010 mpfr_clears (m
, ms
, mc
, NULL
);
13011 if (result_s
&& result_c
)
13013 /* If we are to return in a complex value do so. */
13014 if (!arg_sinp
&& !arg_cosp
)
13015 return build_complex (build_complex_type (type
),
13016 result_c
, result_s
);
13018 /* Dereference the sin/cos pointer arguments. */
13019 arg_sinp
= build_fold_indirect_ref (arg_sinp
);
13020 arg_cosp
= build_fold_indirect_ref (arg_cosp
);
13021 /* Proceed if valid pointer type were passed in. */
13022 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp
)) == TYPE_MAIN_VARIANT (type
)
13023 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp
)) == TYPE_MAIN_VARIANT (type
))
13025 /* Set the values. */
13026 result_s
= fold_build2 (MODIFY_EXPR
, type
, arg_sinp
,
13028 TREE_SIDE_EFFECTS (result_s
) = 1;
13029 result_c
= fold_build2 (MODIFY_EXPR
, type
, arg_cosp
,
13031 TREE_SIDE_EFFECTS (result_c
) = 1;
13032 /* Combine the assignments into a compound expr. */
13033 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13034 result_s
, result_c
));
13042 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
13043 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13044 two-argument mpfr order N Bessel function FUNC on them and return
13045 the resulting value as a tree with type TYPE. The mpfr precision
13046 is set to the precision of TYPE. We assume that function FUNC
13047 returns zero if the result could be calculated exactly within the
13048 requested precision. */
13050 do_mpfr_bessel_n (tree arg1
, tree arg2
, tree type
,
13051 int (*func
)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
13052 const REAL_VALUE_TYPE
*min
, bool inclusive
)
13054 tree result
= NULL_TREE
;
13059 /* To proceed, MPFR must exactly represent the target floating point
13060 format, which only happens when the target base equals two. */
13061 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13062 && host_integerp (arg1
, 0)
13063 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13065 const HOST_WIDE_INT n
= tree_low_cst(arg1
, 0);
13066 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg2
);
13069 && real_isfinite (ra
)
13070 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
)))
13072 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
13076 mpfr_init2 (m
, prec
);
13077 mpfr_from_real (m
, ra
, GMP_RNDN
);
13078 mpfr_clear_flags ();
13079 inexact
= func (m
, n
, m
, GMP_RNDN
);
13080 result
= do_mpfr_ckconv (m
, type
, inexact
);
13088 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13089 the pointer *(ARG_QUO) and return the result. The type is taken
13090 from the type of ARG0 and is used for setting the precision of the
13091 calculation and results. */
13094 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
13096 tree
const type
= TREE_TYPE (arg0
);
13097 tree result
= NULL_TREE
;
13102 /* To proceed, MPFR must exactly represent the target floating point
13103 format, which only happens when the target base equals two. */
13104 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13105 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
13106 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
13108 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
13109 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
13111 if (real_isfinite (ra0
) && real_isfinite (ra1
))
13113 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
13118 mpfr_inits2 (prec
, m0
, m1
, NULL
);
13119 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
13120 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13121 mpfr_clear_flags ();
13122 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, GMP_RNDN
);
13123 /* Remquo is independent of the rounding mode, so pass
13124 inexact=0 to do_mpfr_ckconv(). */
13125 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
13126 mpfr_clears (m0
, m1
, NULL
);
13129 /* MPFR calculates quo in the host's long so it may
13130 return more bits in quo than the target int can hold
13131 if sizeof(host long) > sizeof(target int). This can
13132 happen even for native compilers in LP64 mode. In
13133 these cases, modulo the quo value with the largest
13134 number that the target int can hold while leaving one
13135 bit for the sign. */
13136 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
13137 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
13139 /* Dereference the quo pointer argument. */
13140 arg_quo
= build_fold_indirect_ref (arg_quo
);
13141 /* Proceed iff a valid pointer type was passed in. */
13142 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
13144 /* Set the value. */
13145 tree result_quo
= fold_build2 (MODIFY_EXPR
,
13146 TREE_TYPE (arg_quo
), arg_quo
,
13147 build_int_cst (NULL
, integer_quo
));
13148 TREE_SIDE_EFFECTS (result_quo
) = 1;
13149 /* Combine the quo assignment with the rem. */
13150 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13151 result_quo
, result_rem
));
13159 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13160 resulting value as a tree with type TYPE. The mpfr precision is
13161 set to the precision of TYPE. We assume that this mpfr function
13162 returns zero if the result could be calculated exactly within the
13163 requested precision. In addition, the integer pointer represented
13164 by ARG_SG will be dereferenced and set to the appropriate signgam
13168 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
13170 tree result
= NULL_TREE
;
13174 /* To proceed, MPFR must exactly represent the target floating point
13175 format, which only happens when the target base equals two. Also
13176 verify ARG is a constant and that ARG_SG is an int pointer. */
13177 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13178 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
13179 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
13180 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
13182 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
13184 /* In addition to NaN and Inf, the argument cannot be zero or a
13185 negative integer. */
13186 if (real_isfinite (ra
)
13187 && ra
->cl
!= rvc_zero
13188 && !(real_isneg(ra
) && real_isinteger(ra
, TYPE_MODE (type
))))
13190 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
13195 mpfr_init2 (m
, prec
);
13196 mpfr_from_real (m
, ra
, GMP_RNDN
);
13197 mpfr_clear_flags ();
13198 inexact
= mpfr_lgamma (m
, &sg
, m
, GMP_RNDN
);
13199 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
13205 /* Dereference the arg_sg pointer argument. */
13206 arg_sg
= build_fold_indirect_ref (arg_sg
);
13207 /* Assign the signgam value into *arg_sg. */
13208 result_sg
= fold_build2 (MODIFY_EXPR
,
13209 TREE_TYPE (arg_sg
), arg_sg
,
13210 build_int_cst (NULL
, sg
));
13211 TREE_SIDE_EFFECTS (result_sg
) = 1;
13212 /* Combine the signgam assignment with the lgamma result. */
13213 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13214 result_sg
, result_lg
));
13224 The functions below provide an alternate interface for folding
13225 builtin function calls presented as GIMPLE_CALL statements rather
13226 than as CALL_EXPRs. The folded result is still expressed as a
13227 tree. There is too much code duplication in the handling of
13228 varargs functions, and a more intrusive re-factoring would permit
13229 better sharing of code between the tree and statement-based
13230 versions of these functions. */
13232 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13233 along with N new arguments specified as the "..." parameters. SKIP
13234 is the number of arguments in STMT to be omitted. This function is used
13235 to do varargs-to-varargs transformations. */
13238 gimple_rewrite_call_expr (gimple stmt
, int skip
, tree fndecl
, int n
, ...)
13240 int oldnargs
= gimple_call_num_args (stmt
);
13241 int nargs
= oldnargs
- skip
+ n
;
13242 tree fntype
= TREE_TYPE (fndecl
);
13243 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
13248 buffer
= XALLOCAVEC (tree
, nargs
);
13250 for (i
= 0; i
< n
; i
++)
13251 buffer
[i
] = va_arg (ap
, tree
);
13253 for (j
= skip
; j
< oldnargs
; j
++, i
++)
13254 buffer
[i
] = gimple_call_arg (stmt
, j
);
13256 return fold (build_call_array (TREE_TYPE (fntype
), fn
, nargs
, buffer
));
13259 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13260 a normal call should be emitted rather than expanding the function
13261 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13264 gimple_fold_builtin_sprintf_chk (gimple stmt
, enum built_in_function fcode
)
13266 tree dest
, size
, len
, fn
, fmt
, flag
;
13267 const char *fmt_str
;
13268 int nargs
= gimple_call_num_args (stmt
);
13270 /* Verify the required arguments in the original call. */
13273 dest
= gimple_call_arg (stmt
, 0);
13274 if (!validate_arg (dest
, POINTER_TYPE
))
13276 flag
= gimple_call_arg (stmt
, 1);
13277 if (!validate_arg (flag
, INTEGER_TYPE
))
13279 size
= gimple_call_arg (stmt
, 2);
13280 if (!validate_arg (size
, INTEGER_TYPE
))
13282 fmt
= gimple_call_arg (stmt
, 3);
13283 if (!validate_arg (fmt
, POINTER_TYPE
))
13286 if (! host_integerp (size
, 1))
13291 if (!init_target_chars ())
13294 /* Check whether the format is a literal string constant. */
13295 fmt_str
= c_getstr (fmt
);
13296 if (fmt_str
!= NULL
)
13298 /* If the format doesn't contain % args or %%, we know the size. */
13299 if (strchr (fmt_str
, target_percent
) == 0)
13301 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
13302 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
13304 /* If the format is "%s" and first ... argument is a string literal,
13305 we know the size too. */
13306 else if (fcode
== BUILT_IN_SPRINTF_CHK
13307 && strcmp (fmt_str
, target_percent_s
) == 0)
13313 arg
= gimple_call_arg (stmt
, 4);
13314 if (validate_arg (arg
, POINTER_TYPE
))
13316 len
= c_strlen (arg
, 1);
13317 if (! len
|| ! host_integerp (len
, 1))
13324 if (! integer_all_onesp (size
))
13326 if (! len
|| ! tree_int_cst_lt (len
, size
))
13330 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13331 or if format doesn't contain % chars or is "%s". */
13332 if (! integer_zerop (flag
))
13334 if (fmt_str
== NULL
)
13336 if (strchr (fmt_str
, target_percent
) != NULL
13337 && strcmp (fmt_str
, target_percent_s
))
13341 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13342 fn
= built_in_decls
[fcode
== BUILT_IN_VSPRINTF_CHK
13343 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
];
13347 return gimple_rewrite_call_expr (stmt
, 4, fn
, 2, dest
, fmt
);
13350 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13351 a normal call should be emitted rather than expanding the function
13352 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13353 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13354 passed as second argument. */
13357 gimple_fold_builtin_snprintf_chk (gimple stmt
, tree maxlen
,
13358 enum built_in_function fcode
)
13360 tree dest
, size
, len
, fn
, fmt
, flag
;
13361 const char *fmt_str
;
13363 /* Verify the required arguments in the original call. */
13364 if (gimple_call_num_args (stmt
) < 5)
13366 dest
= gimple_call_arg (stmt
, 0);
13367 if (!validate_arg (dest
, POINTER_TYPE
))
13369 len
= gimple_call_arg (stmt
, 1);
13370 if (!validate_arg (len
, INTEGER_TYPE
))
13372 flag
= gimple_call_arg (stmt
, 2);
13373 if (!validate_arg (flag
, INTEGER_TYPE
))
13375 size
= gimple_call_arg (stmt
, 3);
13376 if (!validate_arg (size
, INTEGER_TYPE
))
13378 fmt
= gimple_call_arg (stmt
, 4);
13379 if (!validate_arg (fmt
, POINTER_TYPE
))
13382 if (! host_integerp (size
, 1))
13385 if (! integer_all_onesp (size
))
13387 if (! host_integerp (len
, 1))
13389 /* If LEN is not constant, try MAXLEN too.
13390 For MAXLEN only allow optimizing into non-_ocs function
13391 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13392 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
13398 if (tree_int_cst_lt (size
, maxlen
))
13402 if (!init_target_chars ())
13405 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13406 or if format doesn't contain % chars or is "%s". */
13407 if (! integer_zerop (flag
))
13409 fmt_str
= c_getstr (fmt
);
13410 if (fmt_str
== NULL
)
13412 if (strchr (fmt_str
, target_percent
) != NULL
13413 && strcmp (fmt_str
, target_percent_s
))
13417 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13419 fn
= built_in_decls
[fcode
== BUILT_IN_VSNPRINTF_CHK
13420 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
];
13424 return gimple_rewrite_call_expr (stmt
, 5, fn
, 3, dest
, len
, fmt
);
13427 /* Builtins with folding operations that operate on "..." arguments
13428 need special handling; we need to store the arguments in a convenient
13429 data structure before attempting any folding. Fortunately there are
13430 only a few builtins that fall into this category. FNDECL is the
13431 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13432 result of the function call is ignored. */
13435 gimple_fold_builtin_varargs (tree fndecl
, gimple stmt
, bool ignore ATTRIBUTE_UNUSED
)
13437 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
13438 tree ret
= NULL_TREE
;
13442 case BUILT_IN_SPRINTF_CHK
:
13443 case BUILT_IN_VSPRINTF_CHK
:
13444 ret
= gimple_fold_builtin_sprintf_chk (stmt
, fcode
);
13447 case BUILT_IN_SNPRINTF_CHK
:
13448 case BUILT_IN_VSNPRINTF_CHK
:
13449 ret
= gimple_fold_builtin_snprintf_chk (stmt
, NULL_TREE
, fcode
);
13456 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
13457 TREE_NO_WARNING (ret
) = 1;
13463 /* A wrapper function for builtin folding that prevents warnings for
13464 "statement without effect" and the like, caused by removing the
13465 call node earlier than the warning is generated. */
13468 fold_call_stmt (gimple stmt
, bool ignore
)
13470 tree ret
= NULL_TREE
;
13471 tree fndecl
= gimple_call_fndecl (stmt
);
13473 && TREE_CODE (fndecl
) == FUNCTION_DECL
13474 && DECL_BUILT_IN (fndecl
)
13475 && !gimple_call_va_arg_pack_p (stmt
))
13477 int nargs
= gimple_call_num_args (stmt
);
13479 /* FIXME: Don't use a list in this interface. */
13480 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
13482 tree arglist
= NULL_TREE
;
13484 for (i
= nargs
- 1; i
>= 0; i
--)
13485 arglist
= tree_cons (NULL_TREE
, gimple_call_arg (stmt
, i
), arglist
);
13486 return targetm
.fold_builtin (fndecl
, arglist
, ignore
);
13490 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
13492 tree args
[MAX_ARGS_TO_FOLD_BUILTIN
];
13494 for (i
= 0; i
< nargs
; i
++)
13495 args
[i
] = gimple_call_arg (stmt
, i
);
13496 ret
= fold_builtin_n (fndecl
, args
, nargs
, ignore
);
13499 ret
= gimple_fold_builtin_varargs (fndecl
, stmt
, ignore
);
13502 /* Propagate location information from original call to
13503 expansion of builtin. Otherwise things like
13504 maybe_emit_chk_warning, that operate on the expansion
13505 of a builtin, will use the wrong location information. */
13506 if (gimple_has_location (stmt
))
13508 tree realret
= ret
;
13509 if (TREE_CODE (ret
) == NOP_EXPR
)
13510 realret
= TREE_OPERAND (ret
, 0);
13511 if (CAN_HAVE_LOCATION_P (realret
)
13512 && !EXPR_HAS_LOCATION (realret
))
13513 SET_EXPR_LOCATION (realret
, gimple_location (stmt
));